diff --git a/.env.example b/.env.example index f3e9502ae..ea03f6537 100644 --- a/.env.example +++ b/.env.example @@ -694,6 +694,15 @@ PLUGINS_ENABLED=true # Default: plugins/config.yaml PLUGIN_CONFIG_FILE=plugins/config.yaml +# Optional defaults for mTLS when connecting to external MCP plugins (STREAMABLEHTTP transport) +# Provide file paths inside the container. Plugin-specific TLS blocks override these defaults. +# PLUGINS_MTLS_CA_BUNDLE=/app/certs/plugins/ca.crt +# PLUGINS_MTLS_CLIENT_CERT=/app/certs/plugins/gateway-client.pem +# PLUGINS_MTLS_CLIENT_KEY=/app/certs/plugins/gateway-client.key +# PLUGINS_MTLS_CLIENT_KEY_PASSWORD= +# PLUGINS_MTLS_VERIFY=true +# PLUGINS_MTLS_CHECK_HOSTNAME=true + ##################################### # Well-Known URI Configuration ##################################### diff --git a/Makefile b/Makefile index 65e3bb148..8baf53861 100644 --- a/Makefile +++ b/Makefile @@ -141,12 +141,6 @@ update: check-env: @echo "🔎 Validating .env against .env.example using Python (prod)..." @python -m mcpgateway.scripts.validate_env .env.example - # @echo "🔎 Checking .env against .env.example..." -# @missing=0; \ -# for key in $$(grep -Ev '^\s*#|^\s*$$' .env.example | cut -d= -f1); do \ -# grep -q "^$$key=" .env || { echo "❌ Missing: $$key"; missing=1; }; \ -# done; \ -# if [ $$missing -eq 0 ]; then echo "✅ All environment variables are present."; fi # Validate .env in development mode (warnings do not fail) check-env-dev: @@ -164,11 +158,17 @@ check-env-dev: # help: certs-jwt - Generate JWT RSA keys in ./certs/jwt/ (idempotent) # help: certs-jwt-ecdsa - Generate JWT ECDSA keys in ./certs/jwt/ (idempotent) # help: certs-all - Generate both TLS certs and JWT keys (combo target) +# help: certs-mcp-ca - Generate MCP CA for plugin mTLS (./certs/mcp/ca/) +# help: certs-mcp-gateway - Generate gateway client certificate (./certs/mcp/gateway/) +# help: certs-mcp-plugin - Generate plugin server certificate (requires PLUGIN_NAME=name) +# help: certs-mcp-all - Generate complete MCP mTLS infrastructure (reads plugins from config.yaml) +# help: certs-mcp-check - Check expiry dates of MCP certificates # help: serve-ssl - Run Gunicorn behind HTTPS on :4444 (uses ./certs) # help: dev - Run fast-reload dev server (uvicorn) # help: run - Execute helper script ./run.sh -.PHONY: serve serve-ssl dev run certs certs-jwt certs-jwt-ecdsa certs-all +.PHONY: serve serve-ssl dev run certs certs-jwt certs-jwt-ecdsa certs-all \ + certs-mcp-ca certs-mcp-gateway certs-mcp-plugin certs-mcp-all certs-mcp-check ## --- Primary servers --------------------------------------------------------- serve: @@ -231,6 +231,142 @@ certs-all: certs certs-jwt ## Generate both TLS certificates and JWT RSA k @echo "📁 JWT: ./certs/jwt/{private,public}.pem" @echo "💡 Use JWT_ALGORITHM=RS256 with JWT_PUBLIC_KEY_PATH=certs/jwt/public.pem" +## --- MCP Plugin mTLS Certificate Management ---------------------------------- +# Default validity period for MCP certificates (in days) +MCP_CERT_DAYS ?= 825 + +# Plugin configuration file for automatic certificate generation +MCP_PLUGIN_CONFIG ?= plugins/external/config.yaml + +certs-mcp-ca: ## Generate CA for MCP plugin mTLS + @if [ -f certs/mcp/ca/ca.key ] && [ -f certs/mcp/ca/ca.crt ]; then \ + echo "🔐 Existing MCP CA found in ./certs/mcp/ca - skipping generation."; \ + echo "⚠️ To regenerate, delete ./certs/mcp/ca and run again."; \ + else \ + echo "🔐 Generating MCP Certificate Authority ($(MCP_CERT_DAYS) days validity)..."; \ + mkdir -p certs/mcp/ca; \ + openssl genrsa -out certs/mcp/ca/ca.key 4096; \ + openssl req -new -x509 -key certs/mcp/ca/ca.key -out certs/mcp/ca/ca.crt \ + -days $(MCP_CERT_DAYS) \ + -subj "/CN=MCP-Gateway-CA/O=MCPGateway/OU=Plugins"; \ + echo "01" > certs/mcp/ca/ca.srl; \ + echo "✅ MCP CA created: ./certs/mcp/ca/ca.{key,crt}"; \ + fi + @chmod 600 certs/mcp/ca/ca.key + @chmod 644 certs/mcp/ca/ca.crt + @echo "🔒 Permissions set: ca.key (600), ca.crt (644)" + +certs-mcp-gateway: certs-mcp-ca ## Generate gateway client certificate + @if [ -f certs/mcp/gateway/client.key ] && [ -f certs/mcp/gateway/client.crt ]; then \ + echo "🔐 Existing gateway client certificate found - skipping generation."; \ + else \ + echo "🔐 Generating gateway client certificate ($(MCP_CERT_DAYS) days)..."; \ + mkdir -p certs/mcp/gateway; \ + openssl genrsa -out certs/mcp/gateway/client.key 4096; \ + openssl req -new -key certs/mcp/gateway/client.key \ + -out certs/mcp/gateway/client.csr \ + -subj "/CN=mcp-gateway-client/O=MCPGateway/OU=Gateway"; \ + openssl x509 -req -in certs/mcp/gateway/client.csr \ + -CA certs/mcp/ca/ca.crt -CAkey certs/mcp/ca/ca.key \ + -CAcreateserial -out certs/mcp/gateway/client.crt \ + -days $(MCP_CERT_DAYS) -sha256; \ + rm certs/mcp/gateway/client.csr; \ + cp certs/mcp/ca/ca.crt certs/mcp/gateway/ca.crt; \ + echo "✅ Gateway client certificate created: ./certs/mcp/gateway/"; \ + fi + @chmod 600 certs/mcp/gateway/client.key + @chmod 644 certs/mcp/gateway/client.crt certs/mcp/gateway/ca.crt + @echo "🔒 Permissions set: client.key (600), client.crt (644), ca.crt (644)" + +certs-mcp-plugin: certs-mcp-ca ## Generate plugin server certificate (PLUGIN_NAME=name) + @if [ -z "$(PLUGIN_NAME)" ]; then \ + echo "❌ ERROR: PLUGIN_NAME not set"; \ + echo "💡 Usage: make certs-mcp-plugin PLUGIN_NAME=my-plugin"; \ + exit 1; \ + fi + @if [ -f certs/mcp/plugins/$(PLUGIN_NAME)/server.key ] && \ + [ -f certs/mcp/plugins/$(PLUGIN_NAME)/server.crt ]; then \ + echo "🔐 Existing certificate for plugin '$(PLUGIN_NAME)' found - skipping."; \ + else \ + echo "🔐 Generating server certificate for plugin '$(PLUGIN_NAME)' ($(MCP_CERT_DAYS) days)..."; \ + mkdir -p certs/mcp/plugins/$(PLUGIN_NAME); \ + openssl genrsa -out certs/mcp/plugins/$(PLUGIN_NAME)/server.key 4096; \ + openssl req -new -key certs/mcp/plugins/$(PLUGIN_NAME)/server.key \ + -out certs/mcp/plugins/$(PLUGIN_NAME)/server.csr \ + -subj "/CN=mcp-plugin-$(PLUGIN_NAME)/O=MCPGateway/OU=Plugins"; \ + openssl x509 -req -in certs/mcp/plugins/$(PLUGIN_NAME)/server.csr \ + -CA certs/mcp/ca/ca.crt -CAkey certs/mcp/ca/ca.key \ + -CAcreateserial -out certs/mcp/plugins/$(PLUGIN_NAME)/server.crt \ + -days $(MCP_CERT_DAYS) -sha256 \ + -extfile <(printf "subjectAltName=DNS:$(PLUGIN_NAME),DNS:mcp-plugin-$(PLUGIN_NAME),DNS:localhost"); \ + rm certs/mcp/plugins/$(PLUGIN_NAME)/server.csr; \ + cp certs/mcp/ca/ca.crt certs/mcp/plugins/$(PLUGIN_NAME)/ca.crt; \ + echo "✅ Plugin '$(PLUGIN_NAME)' certificate created: ./certs/mcp/plugins/$(PLUGIN_NAME)/"; \ + fi + @chmod 600 certs/mcp/plugins/$(PLUGIN_NAME)/server.key + @chmod 644 certs/mcp/plugins/$(PLUGIN_NAME)/server.crt certs/mcp/plugins/$(PLUGIN_NAME)/ca.crt + @echo "🔒 Permissions set: server.key (600), server.crt (644), ca.crt (644)" + +certs-mcp-all: certs-mcp-ca certs-mcp-gateway ## Generate complete mTLS infrastructure + @echo "🔐 Generating certificates for plugins..." + @# Read plugin names from config file if it exists + @if [ -f "$(MCP_PLUGIN_CONFIG)" ]; then \ + echo "📋 Reading plugin names from $(MCP_PLUGIN_CONFIG)"; \ + python3 -c "import yaml; \ + config = yaml.safe_load(open('$(MCP_PLUGIN_CONFIG)')); \ + plugins = [p['name'] for p in config.get('plugins', []) if p.get('kind') == 'external']; \ + print('\n'.join(plugins))" 2>/dev/null | while read plugin_name; do \ + if [ -n "$$plugin_name" ]; then \ + echo " Generating for: $$plugin_name"; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME="$$plugin_name"; \ + fi; \ + done || echo "⚠️ PyYAML not installed or config parse failed, generating example plugins..."; \ + fi + @# Fallback to example plugins if no config or parsing failed + @if [ ! -f "$(MCP_PLUGIN_CONFIG)" ] || ! python3 -c "import yaml" 2>/dev/null; then \ + echo "🔐 Generating certificates for example plugins..."; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME=example-plugin-a; \ + $(MAKE) certs-mcp-plugin PLUGIN_NAME=example-plugin-b; \ + fi + @echo "" + @echo "🎯 MCP mTLS infrastructure generated successfully!" + @echo "📁 Structure:" + @echo " certs/mcp/ca/ - Certificate Authority" + @echo " certs/mcp/gateway/ - Gateway client certificate" + @echo " certs/mcp/plugins/*/ - Plugin server certificates" + @echo "" + @echo "💡 Generate additional plugin certificates with:" + @echo " make certs-mcp-plugin PLUGIN_NAME=your-plugin-name" + @echo "" + @echo "💡 Certificate validity: $(MCP_CERT_DAYS) days" + @echo " To change: make certs-mcp-all MCP_CERT_DAYS=365" + +certs-mcp-check: ## Check expiry dates of MCP certificates + @echo "🔍 Checking MCP certificate expiry dates..." + @echo "" + @if [ -f certs/mcp/ca/ca.crt ]; then \ + echo "📋 CA Certificate:"; \ + openssl x509 -in certs/mcp/ca/ca.crt -noout -enddate | sed 's/notAfter=/ Expires: /'; \ + echo ""; \ + fi + @if [ -f certs/mcp/gateway/client.crt ]; then \ + echo "📋 Gateway Client Certificate:"; \ + openssl x509 -in certs/mcp/gateway/client.crt -noout -enddate | sed 's/notAfter=/ Expires: /'; \ + echo ""; \ + fi + @if [ -d certs/mcp/plugins ]; then \ + echo "📋 Plugin Certificates:"; \ + for plugin_dir in certs/mcp/plugins/*; do \ + if [ -f "$$plugin_dir/server.crt" ]; then \ + plugin_name=$$(basename "$$plugin_dir"); \ + expiry=$$(openssl x509 -in "$$plugin_dir/server.crt" -noout -enddate | sed 's/notAfter=//'); \ + echo " $$plugin_name: $$expiry"; \ + fi; \ + done; \ + echo ""; \ + fi + @echo "💡 To regenerate expired certificates, delete the cert directory and run make certs-mcp-all" + ## --- House-keeping ----------------------------------------------------------- # help: clean - Remove caches, build artefacts, virtualenv, docs, certs, coverage, SBOM, database files, etc. .PHONY: clean diff --git a/README.md b/README.md index 6acf4805d..46015c100 100644 --- a/README.md +++ b/README.md @@ -1623,6 +1623,12 @@ MCP Gateway uses Alembic for database migrations. Common commands: | ------------------------------ | ------------------------------------------------ | --------------------- | ------- | | `PLUGINS_ENABLED` | Enable the plugin framework | `false` | bool | | `PLUGIN_CONFIG_FILE` | Path to main plugin configuration file | `plugins/config.yaml` | string | +| `PLUGINS_MTLS_CA_BUNDLE` | (Optional) default CA bundle for external plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_CERT` | (Optional) gateway client certificate for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY` | (Optional) gateway client key for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY_PASSWORD` | (Optional) password for plugin client key | _(empty)_ | string | +| `PLUGINS_MTLS_VERIFY` | (Optional) verify remote plugin certificates (`true`/`false`) | `true` | bool | +| `PLUGINS_MTLS_CHECK_HOSTNAME` | (Optional) enforce hostname verification for plugins | `true` | bool | | `PLUGINS_CLI_COMPLETION` | Enable auto-completion for plugins CLI | `false` | bool | | `PLUGINS_CLI_MARKUP_MODE` | Set markup mode for plugins CLI | (none) | `rich`, `markdown`, `disabled` | diff --git a/charts/mcp-stack/values.yaml b/charts/mcp-stack/values.yaml index 3b4725e32..5e0409c21 100644 --- a/charts/mcp-stack/values.yaml +++ b/charts/mcp-stack/values.yaml @@ -267,6 +267,12 @@ mcpContextForge: # ─ Plugin Configuration ─ PLUGINS_ENABLED: "false" # enable the plugin framework PLUGIN_CONFIG_FILE: "plugins/config.yaml" # path to main plugin configuration file + PLUGINS_MTLS_CA_BUNDLE: "" # default CA bundle for external plugins (optional) + PLUGINS_MTLS_CLIENT_CERT: "" # gateway client certificate for plugin mTLS + PLUGINS_MTLS_CLIENT_KEY: "" # gateway client key for plugin mTLS (optional) + PLUGINS_MTLS_CLIENT_KEY_PASSWORD: "" # password for the plugin client key (optional) + PLUGINS_MTLS_VERIFY: "true" # verify remote plugin certificates + PLUGINS_MTLS_CHECK_HOSTNAME: "true" # enforce hostname verification when verifying certs PLUGINS_CLI_COMPLETION: "false" # enable auto-completion for plugins CLI PLUGINS_CLI_MARKUP_MODE: "" # set markup mode for plugins CLI diff --git a/docs/docs/deployment/.pages b/docs/docs/deployment/.pages index f7e568e00..2e093a6ad 100644 --- a/docs/docs/deployment/.pages +++ b/docs/docs/deployment/.pages @@ -14,3 +14,4 @@ nav: - azure.md - fly-io.md - proxy-auth.md + - cforge-gateway.md diff --git a/docs/docs/deployment/cforge-gateway.md b/docs/docs/deployment/cforge-gateway.md new file mode 100644 index 000000000..042a0d2ab --- /dev/null +++ b/docs/docs/deployment/cforge-gateway.md @@ -0,0 +1,2155 @@ +# cforge gateway - Deployment Tool + +## Overview + +The `cforge gateway` command is a powerful deployment tool for MCP Gateway and its external plugins. It provides a unified, declarative way to build, configure, and deploy the complete MCP stack from a single YAML configuration file. + +### Why We Created It + +Before `cforge gateway`, deploying MCP Gateway with external plugins required: + +- **Manual container builds** for each plugin from different repositories +- **Complex mTLS certificate generation** and distribution +- **Hand-crafted Kubernetes manifests** or Docker Compose files +- **Environment variable management** across multiple services +- **Coordination** between gateway configuration and plugin deployments + +`cforge gateway` solves these challenges by: + +✅ **Automating the entire deployment pipeline** from source to running services +✅ **Managing mTLS certificates** automatically with proper distribution +✅ **Generating deployment manifests** (Kubernetes or Docker Compose) from a single source +✅ **Supporting multiple build modes** (Dagger for performance, plain Python for portability) +✅ **Validating configurations** before deployment +✅ **Integrating with CI/CD** workflows and secret management + +--- + +## Features + +### Build System + +- **Dual-mode execution**: Dagger (optimal performance) or plain Python (fallback) +- **Git-based plugin builds**: Clone and build plugins from any Git repository +- **Pre-built image support**: Use existing Docker images +- **Multi-stage build support**: Build specific stages from Dockerfiles +- **Build caching**: Intelligent caching to speed up rebuilds + +### Deployment Targets + +- **Kubernetes**: Full manifest generation with ConfigMaps, Secrets, Services, Deployments +- **Docker Compose**: Complete stack with networking and volume management +- **Local development**: Quick testing with exposed ports +- **Production-ready**: Resource limits, health checks, and best practices + +### Security + +- **Automatic mTLS**: Generate and distribute certificates for gateway ↔ plugin communication +- **Certificate rotation**: Configurable validity periods +- **Secret management**: Integration with environment files and CI/CD vaults +- **Network isolation**: Proper service-to-service communication + +### Workflow Automation + +- **Validation**: Pre-flight checks before deployment +- **Build**: Build containers from source or pull pre-built images +- **Certificate generation**: Create mTLS cert hierarchy +- **Deployment**: Apply manifests to target environment +- **Verification**: Health check deployed services +- **Destruction**: Clean teardown + +--- + +## Future Directions + +The `cforge gateway` tool is actively evolving to support broader MCP ecosystem workflows. Planned enhancements include: + +### MCP Server Lifecycle Management + +Currently, `cforge gateway` focuses on deploying external plugins. Future versions will support the complete lifecycle of MCP servers: + +- **Build & Deploy MCP Servers**: Build MCP servers from Git repositories, similar to current plugin support +- **Automatic Registration**: Deploy MCP servers and automatically register them with the gateway as peers +- **Plugin Attachment**: Attach and configure plugins for registered MCP servers, enabling policy enforcement and filtering at the server level +- **Configuration Generation**: Generate MCP server configurations from templates +- **Multi-Server Deployments**: Deploy multiple MCP servers as a coordinated fleet + +This will enable declarative deployment of complete MCP ecosystems from a single configuration file: + +```yaml +# Future concept +mcp_servers: + - name: GitHubMCPServer + repo: https://github.com/org/mcp-server-github.git + auto_register: true # Auto-register as gateway peer + expose_tools: ["*"] # Expose all tools through gateway + expose_resources: ["repos"] # Expose specific resources + + # Attach plugins to this MCP server + plugins: + - OPAPluginFilter # Apply OPA policies to this server + - PIIFilterPlugin # Filter PII from responses +``` + +### Live MCP Server Discovery + +Automatic discovery and registration of running MCP servers: + +- **mDNS/Zeroconf Discovery**: Automatically discover MCP servers on the local network +- **Service Mesh Integration**: Integrate with Kubernetes service discovery +- **Dynamic Registration**: Register servers at runtime without redeployment +- **Health-Based Registration**: Automatically register/deregister based on health checks + +### Container Security Policies + +Attach security policies to built containers for enhanced compliance and governance: + +- **OPA Policy Bundles**: Include Open Policy Agent (OPA) policies with container builds +- **SBOM Generation**: Automatically generate Software Bill of Materials (SBOM) for built images +- **Vulnerability Scanning**: Integrate Trivy/Grype scans into build pipeline +- **Policy Enforcement**: Define and enforce security policies (allowed packages, CVE thresholds, etc.) +- **Signing & Attestation**: Sign built images with Cosign/Sigstore +- **Runtime Security**: Define AppArmor/SELinux profiles for deployed containers + +Example future configuration: + +```yaml +# Future concept +security: + policies: + enabled: true + opa_bundle: ./policies/container-security.rego + sbom: true + vulnerability_scan: + enabled: true + fail_on: critical + allowlist: ["CVE-2024-1234"] + signing: + enabled: true + keyless: true # Sigstore keyless signing +``` + +These enhancements will make `cforge gateway` a comprehensive tool for building, securing, deploying, and managing the entire MCP infrastructure stack. + +--- + +## Quick Start + +### Installation + +The `cforge` CLI is installed with the MCP Gateway package: + +```bash +pip install -e . +``` + +Verify installation: + +```bash +cforge --help +cforge gateway --help +``` + +### Basic Workflow + +```bash +# 1. Validate your configuration +cforge gateway validate examples/deployment-configs/deploy-compose.yaml + +# 2. Build containers (if building from source) +cforge gateway build examples/deployment-configs/deploy-compose.yaml + +# 3. Generate mTLS certificates (if needed) +cforge gateway certs examples/deployment-configs/deploy-compose.yaml + +# 4. Deploy the stack +cforge gateway deploy examples/deployment-configs/deploy-compose.yaml + +# 5. Verify deployment health +cforge gateway verify examples/deployment-configs/deploy-compose.yaml + +# 6. (Optional) Tear down +cforge gateway destroy examples/deployment-configs/deploy-compose.yaml +``` + +--- + +## Commands + +### `cforge gateway validate` + +Validates the deployment configuration file without making any changes. + +```bash +cforge gateway validate +``` + +**Example:** +```bash +cforge gateway validate deploy.yaml +``` + +**Output:** +- ✅ Configuration syntax validation +- ✅ Plugin name uniqueness check +- ✅ Required field verification +- ✅ Build configuration validation (image XOR repo) + +--- + +### `cforge gateway build` + +Builds container images for gateway and/or plugins from source repositories. + +```bash +cforge gateway build [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--plugins-only` | Only build plugin containers, skip gateway | `false` | +| `--plugin NAME`, `-p NAME` | Build specific plugin(s) only (can specify multiple) | All plugins | +| `--no-cache` | Disable Docker build cache | `false` | +| `--copy-env-templates` | Copy `.env.template` files from plugin repos | `true` | + +**Examples:** +```bash +# Build everything +cforge gateway build deploy.yaml + +# Build only plugins +cforge gateway build deploy.yaml --plugins-only + +# Build specific plugin +cforge gateway build deploy.yaml --plugin OPAPluginFilter + +# Build multiple plugins with no cache +cforge gateway build deploy.yaml --plugin OPAPluginFilter --plugin LLMGuardPlugin --no-cache +``` + +**What it does:** +1. Clones Git repositories (if `repo` specified) +2. Checks out specified branch/tag/commit (`ref`) +3. Builds Docker images from `containerfile` in `context` directory +4. Tags images appropriately for deployment +5. Copies `.env.template` files to `deploy/env/` for customization + +--- + +### `cforge gateway certs` + +Generates mTLS certificate hierarchy for secure gateway ↔ plugin communication. + +```bash +cforge gateway certs +``` + +**Example:** +```bash +cforge gateway certs deploy.yaml +``` + +**What it generates:** +``` +certs/mcp/ +├── ca/ +│ ├── ca.crt # Root CA certificate +│ └── ca.key # Root CA private key +├── gateway/ +│ ├── client.crt # Gateway client certificate +│ ├── client.key # Gateway client private key +│ └── ca.crt # CA cert (for verification) +└── plugins/ + ├── PluginName1/ + │ ├── server.crt # Plugin server certificate + │ ├── server.key # Plugin server private key + │ └── ca.crt # CA cert (for verification) + └── PluginName2/ + ├── server.crt + ├── server.key + └── ca.crt +``` + +**Certificate Properties:** +- Validity: Configurable (default: 825 days) +- CN for gateway: `mcp-gateway` +- CN for plugins: `mcp-plugin-{PluginName}` +- SANs: `{PluginName}, mcp-plugin-{PluginName}, localhost` + +--- + +### `cforge gateway deploy` + +Deploys the complete MCP stack to the target environment. + +```bash +cforge gateway deploy [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--output-dir DIR`, `-o DIR` | Custom output directory for manifests | `deploy/` | +| `--dry-run` | Generate manifests without deploying | `false` | +| `--skip-build` | Skip container build step | `false` | +| `--skip-certs` | Skip certificate generation | `false` | + +**Examples:** +```bash +# Full deployment +cforge gateway deploy deploy.yaml + +# Dry-run (generate manifests only) +cforge gateway deploy deploy.yaml --dry-run + +# Deploy with existing images and certs +cforge gateway deploy deploy.yaml --skip-build --skip-certs + +# Custom output directory +cforge gateway deploy deploy.yaml --output-dir ./my-deployment +``` + +**Deployment Process:** +1. **Validate** configuration +2. **Build** containers (unless `--skip-build`) +3. **Generate certificates** (unless `--skip-certs` or already exist) +4. **Generate manifests** (Kubernetes or Docker Compose) +5. **Apply** to target environment: + - **Kubernetes**: `kubectl apply -f` + - **Docker Compose**: `docker-compose up -d` + +**Generated Files:** +``` +deploy/ +├── env/ # Environment files +│ ├── .env.gateway +│ ├── .env.PluginName1 +│ └── .env.PluginName2 +├── manifests/ # Kubernetes OR +│ ├── namespace.yaml +│ ├── configmaps.yaml +│ ├── secrets.yaml +│ ├── gateway-deployment.yaml +│ ├── gateway-service.yaml +│ ├── plugin-deployments.yaml +│ └── plugin-services.yaml +└── docker-compose.yaml # Docker Compose +``` + +--- + +### `cforge gateway verify` + +Verifies that the deployed stack is healthy and running. + +```bash +cforge gateway verify [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--wait` | Wait for deployment to be ready | `true` | +| `--timeout SECONDS` | Wait timeout in seconds | `300` | + +**Examples:** +```bash +# Verify deployment (wait up to 5 minutes) +cforge gateway verify deploy.yaml + +# Quick check without waiting +cforge gateway verify deploy.yaml --no-wait + +# Custom timeout +cforge gateway verify deploy.yaml --timeout 600 +``` + +**Checks:** +- Container/pod readiness +- Health endpoint responses +- Service connectivity +- mTLS handshake (if enabled) + +--- + +### `cforge gateway destroy` + +Tears down the deployed MCP stack. + +```bash +cforge gateway destroy [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--force` | Skip confirmation prompt | `false` | + +**Examples:** +```bash +# Destroy with confirmation +cforge gateway destroy deploy.yaml + +# Force destroy without prompt +cforge gateway destroy deploy.yaml --force +``` + +**What it removes:** +- **Kubernetes**: Deletes all resources in namespace +- **Docker Compose**: Stops and removes containers, networks, volumes + +⚠️ **Note:** This does NOT delete generated certificates or build artifacts. To clean those: +```bash +rm -rf certs/ deploy/ +``` + +--- + +### `cforge gateway generate` + +Generates deployment manifests without deploying them. + +```bash +cforge gateway generate [OPTIONS] +``` + +**Options:** + +| Option | Description | Default | +|--------|-------------|---------| +| `--output DIR`, `-o DIR` | Output directory for manifests | `deploy/` | + +**Examples:** +```bash +# Generate manifests +cforge gateway generate deploy.yaml + +# Custom output directory +cforge gateway generate deploy.yaml --output ./manifests +``` + +**Use cases:** +- GitOps workflows (commit generated manifests) +- Manual review before deployment +- Integration with external deployment tools +- CI/CD pipeline artifact generation + +--- + +### `cforge gateway version` + +Shows version and runtime information. + +```bash +cforge gateway version +``` + +**Output:** +``` +┌─ Version Info ─────────────────┐ +│ MCP Deploy │ +│ Version: 1.0.0 │ +│ Mode: dagger │ +│ Environment: local │ +└────────────────────────────────┘ +``` + +--- + +## Global Options + +These options apply to all commands: + +| Option | Description | Default | +|--------|-------------|---------| +| `--dagger` | Enable Dagger mode (auto-downloads CLI if needed) | `false` (uses plain Python) | +| `--verbose`, `-v` | Verbose output | `false` | + +**Examples:** +```bash +# Use plain Python mode (default) +cforge gateway deploy deploy.yaml + +# Enable Dagger mode for optimized builds +cforge gateway --dagger deploy deploy.yaml + +# Verbose mode +cforge gateway -v build deploy.yaml + +# Combine options +cforge gateway --dagger -v deploy deploy.yaml +``` + +--- + +## Configuration Reference + +### Deployment Configuration + +Top-level deployment settings: + +```yaml +deployment: + type: kubernetes | compose # Required: Deployment target + project_name: my-project # Docker Compose only + namespace: mcp-gateway # Kubernetes only + container_engine: podman | docker # Container runtime (auto-detected if not specified) + + # OpenShift-specific configuration (optional) + openshift: + create_routes: true # Create OpenShift Route resources + domain: apps-crc.testing # OpenShift apps domain (auto-detected if omitted) + tls_termination: edge # TLS termination mode: edge, passthrough, or reencrypt +``` + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `type` | string | ✅ | Deployment type: `kubernetes` or `compose` | - | +| `project_name` | string | ❌ | Docker Compose project name | - | +| `namespace` | string | ❌ | Kubernetes namespace | - | +| `container_engine` | string | ❌ | Container runtime: `docker` or `podman` | Auto-detected | +| `openshift` | object | ❌ | OpenShift-specific configuration (see below) | - | + +#### OpenShift Configuration + +OpenShift Routes provide native external access to services, with built-in TLS termination and integration with OpenShift's router/HAProxy infrastructure. + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `create_routes` | boolean | ❌ | Create OpenShift Route resources for external access | `false` | +| `domain` | string | ❌ | OpenShift apps domain for route hostnames | Auto-detected from cluster | +| `tls_termination` | string | ❌ | TLS termination mode: `edge`, `passthrough`, or `reencrypt` | `edge` | + +**Example:** +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-test + openshift: + create_routes: true + domain: apps-crc.testing + tls_termination: edge +``` + +When `create_routes: true`, the tool generates an OpenShift Route for the gateway: +- **Host**: `mcpgateway-admin-{namespace}.{domain}` +- **Path**: `/` +- **TLS**: Edge termination (default) +- **Target**: Gateway service on HTTP port + +**Access the gateway:** +```bash +# OpenShift Local (CRC) example +https://mcpgateway-admin-mcp-gateway-test.apps-crc.testing +``` + +**Domain auto-detection:** +If `domain` is not specified, the tool attempts to auto-detect the OpenShift apps domain from the cluster: +```bash +kubectl get ingresses.config.openshift.io cluster -o jsonpath='{.spec.domain}' +``` + +If auto-detection fails, it defaults to `apps-crc.testing` (OpenShift Local). + +--- + +### Gateway Configuration + +Gateway server settings: + +```yaml +gateway: + # Build Configuration (choose ONE) + image: mcpgateway/mcpgateway:latest # Pre-built image + # OR + repo: https://github.com/org/repo.git # Build from source + ref: main # Git branch/tag/commit + context: . # Build context directory + containerfile: Containerfile # Dockerfile path + target: production # Multi-stage build target + + # Runtime Configuration + port: 4444 # Internal port + host_port: 4444 # Host port mapping (compose only) + + # mTLS Client Configuration (gateway → plugins) + mtls_enabled: true # Enable mTLS + mtls_verify: true # Verify server certs + mtls_check_hostname: false # Verify hostname + + # Container Registry Configuration (optional) + registry: + enabled: true # Enable registry push + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org + push: true # Push after build + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy + + # Environment Variables + env_vars: + LOG_LEVEL: INFO + MCPGATEWAY_UI_ENABLED: "true" + AUTH_REQUIRED: "true" + # ... (see full reference below) + + # Kubernetes-specific + replicas: 2 # Number of replicas + service_type: ClusterIP # Service type + service_port: 4444 # Service port + memory_request: 256Mi # Memory request + memory_limit: 512Mi # Memory limit + cpu_request: 100m # CPU request + cpu_limit: 500m # CPU limit + image_pull_policy: IfNotPresent # Image pull policy +``` + +**Build Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `image` | string | ❌* | Pre-built Docker image | - | +| `repo` | string | ❌* | Git repository URL | - | +| `ref` | string | ❌ | Git branch/tag/commit | `main` | +| `context` | string | ❌ | Build context subdirectory | `.` | +| `containerfile` | string | ❌ | Containerfile/Dockerfile path | `Containerfile` | +| `target` | string | ❌ | Multi-stage build target | - | + +\* **Either `image` OR `repo` must be specified** + +**Runtime Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `port` | integer | ❌ | Internal container port | `4444` | +| `host_port` | integer | ❌ | Host port mapping (compose only) | - | +| `env_vars` | object | ❌ | Environment variables | `{}` | +| `mtls_enabled` | boolean | ❌ | Enable mTLS client | `true` | +| `mtls_verify` | boolean | ❌ | Verify server certificates | `true` | +| `mtls_check_hostname` | boolean | ❌ | Verify hostname in cert | `false` | +| `registry` | object | ❌ | Container registry configuration | - | + +**Container Registry Configuration Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `enabled` | boolean | ❌ | Enable registry integration | `false` | +| `url` | string | ❌* | Registry URL (e.g., `docker.io`, `quay.io`, OpenShift registry) | - | +| `namespace` | string | ❌* | Registry namespace/organization/project | - | +| `push` | boolean | ❌ | Push image to registry after build | `true` | +| `image_pull_policy` | string | ❌ | Kubernetes imagePullPolicy (`Always`, `IfNotPresent`, `Never`) | `IfNotPresent` | + +\* Required when `enabled: true` + +**Kubernetes-specific Fields:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `replicas` | integer | ❌ | Number of pod replicas | `1` | +| `service_type` | string | ❌ | Service type (ClusterIP, NodePort, LoadBalancer) | `ClusterIP` | +| `service_port` | integer | ❌ | Service port | `4444` | +| `memory_request` | string | ❌ | Memory request | `256Mi` | +| `memory_limit` | string | ❌ | Memory limit | `512Mi` | +| `cpu_request` | string | ❌ | CPU request | `100m` | +| `cpu_limit` | string | ❌ | CPU limit | `500m` | +| `image_pull_policy` | string | ❌ | Image pull policy | `IfNotPresent` | + +--- + +### Plugin Configuration + +External plugin settings (array of plugin objects): + +```yaml +plugins: + - name: MyPlugin # Required: Unique plugin name + + # Build Configuration (choose ONE) + image: myorg/myplugin:latest # Pre-built image + # OR + repo: https://github.com/org/repo.git # Build from source + ref: main + context: plugins/myplugin + containerfile: Containerfile + target: builder + + # Runtime Configuration + port: 8000 # Internal port + expose_port: true # Expose on host (compose only) + + # mTLS Server Configuration (plugin server) + mtls_enabled: true # Enable mTLS server + + # Container Registry Configuration (optional) + registry: + enabled: true # Enable registry push + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org + push: true # Push after build + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy + + # Environment Variables + env_vars: + LOG_LEVEL: DEBUG + CUSTOM_SETTING: value + + # Plugin Manager Overrides (client-side) + plugin_overrides: + priority: 10 + mode: enforce + description: "My custom plugin" + tags: ["security", "filter"] + + # Kubernetes-specific + replicas: 1 + service_type: ClusterIP + service_port: 8000 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + image_pull_policy: IfNotPresent +``` + +**Required Fields:** + +| Field | Type | Description | +|-------|------|-------------| +| `name` | string | Unique plugin identifier (used for cert CN, service names, etc.) | + +**Build Configuration:** Same as Gateway (see above) + +**Runtime Configuration:** + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `port` | integer | ❌ | Internal container port | `8000` | +| `expose_port` | boolean | ❌ | Expose port on host (compose only) | `false` | +| `env_vars` | object | ❌ | Environment variables | `{}` | +| `mtls_enabled` | boolean | ❌ | Enable mTLS server | `true` | +| `registry` | object | ❌ | Container registry configuration (same fields as gateway) | - | +| `plugin_overrides` | object | ❌ | Plugin manager config overrides | `{}` | + +**Plugin Overrides:** + +| Field | Type | Description | Default | +|-------|------|-------------|---------| +| `priority` | integer | Plugin execution priority (lower = earlier) | - | +| `mode` | string | `enforce`, `monitor`, or `dry-run` | - | +| `description` | string | Plugin description | - | +| `tags` | array | Plugin tags for categorization | - | +| `hooks` | array | Enabled hooks: `prompt_pre_fetch`, `tool_pre_invoke`, etc. | All hooks | + +**Kubernetes-specific:** Same as Gateway (see above) + +--- + +### Certificate Configuration + +mTLS certificate generation settings: + +```yaml +certificates: + # Local certificate generation (default) + validity_days: 825 # Certificate validity period + auto_generate: true # Auto-generate if missing + ca_path: ./certs/mcp/ca # CA certificate directory + gateway_path: ./certs/mcp/gateway # Gateway cert directory + plugins_path: ./certs/mcp/plugins # Plugins cert directory + + # OR use cert-manager (Kubernetes only) + use_cert_manager: true # Use cert-manager for certificates + cert_manager_issuer: mcp-ca-issuer # Issuer/ClusterIssuer name + cert_manager_kind: Issuer # Issuer or ClusterIssuer +``` + +| Field | Type | Required | Description | Default | +|-------|------|----------|-------------|---------| +| `validity_days` | integer | ❌ | Certificate validity in days | `825` | +| `auto_generate` | boolean | ❌ | Auto-generate certificates locally if missing | `true` | +| `ca_path` | string | ❌ | CA certificate directory (local mode) | `./certs/mcp/ca` | +| `gateway_path` | string | ❌ | Gateway client cert directory (local mode) | `./certs/mcp/gateway` | +| `plugins_path` | string | ❌ | Plugin server certs base directory (local mode) | `./certs/mcp/plugins` | +| `use_cert_manager` | boolean | ❌ | Use cert-manager for certificate management (Kubernetes only) | `false` | +| `cert_manager_issuer` | string | ❌ | cert-manager Issuer/ClusterIssuer name | `mcp-ca-issuer` | +| `cert_manager_kind` | string | ❌ | cert-manager issuer kind: `Issuer` or `ClusterIssuer` | `Issuer` | + +#### cert-manager Integration (Kubernetes Only) + +[cert-manager](https://cert-manager.io) is a Kubernetes-native certificate management controller that automates certificate issuance and renewal. + +**Benefits:** +- ✅ **Automatic Renewal**: Certificates renewed before expiry (default: at 2/3 of lifetime) +- ✅ **Native Kubernetes**: Certificates defined as Kubernetes Custom Resources +- ✅ **Simplified Operations**: No manual certificate generation or rotation +- ✅ **GitOps Friendly**: Certificate definitions version-controlled + +**Prerequisites:** +1. Install cert-manager in your cluster: + ```bash + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml + ``` + +2. Create namespace and CA Issuer (one-time setup): + ```bash + # Create namespace first + kubectl create namespace mcp-gateway-test + + # Apply CA Issuer + kubectl apply -f examples/deployment-configs/cert-manager-issuer-example.yaml + ``` + +**Configuration:** +```yaml +certificates: + use_cert_manager: true + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer + validity_days: 825 +``` + +When `use_cert_manager: true`: +- Local certificate generation is skipped +- cert-manager Certificate CRDs are generated for gateway and plugins +- cert-manager automatically creates Kubernetes TLS secrets +- Certificates are auto-renewed before expiry + +**Important**: The cert-manager Issuer and CA certificate are long-lived infrastructure. When you destroy your MCP deployment, the Issuer remains (by design) for reuse across deployments. + +--- + +### Infrastructure Services + +PostgreSQL and Redis are **automatically deployed** with the MCP Gateway stack using hardcoded defaults: + +**PostgreSQL (always deployed):** +- Image: `postgres:17` +- Database: `mcp` +- User: `postgres` +- Password: `mysecretpassword` (override with `POSTGRES_PASSWORD` env var) +- Port: `5432` +- Kubernetes: Uses 10Gi PVC + +**Redis (always deployed):** +- Image: `redis:latest` +- Port: `6379` + +**Connection strings (auto-configured):** +```bash +DATABASE_URL=postgresql://postgres:${POSTGRES_PASSWORD}@postgres:5432/mcp +REDIS_URL=redis://redis:6379/0 +``` + +These services are included in all deployments and cannot currently be disabled or customized via the deployment YAML. To customize PostgreSQL password: + +```bash +# Set before deploying +export POSTGRES_PASSWORD=your-secure-password +cforge gateway deploy deploy.yaml +``` + +--- + +## Example Configurations + +### Example 1: Docker Compose (No mTLS) + +**File:** `examples/deployment-configs/deploy-compose.yaml` + +Simple local deployment for development and testing: + +```yaml +deployment: + type: compose + project_name: mcp-stack-test + +gateway: + image: mcpgateway/mcpgateway:latest + port: 4444 + host_port: 4444 + + env_vars: + LOG_LEVEL: DEBUG + MCPGATEWAY_UI_ENABLED: "true" + AUTH_REQUIRED: "false" + + mtls_enabled: false + +plugins: + - name: OPAPluginFilter + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + + expose_port: true + mtls_enabled: false + + plugin_overrides: + priority: 10 + mode: "enforce" + +certificates: + auto_generate: true +``` + +**Use case:** Quick local testing without security overhead + +**Deploy:** +```bash +cforge gateway deploy examples/deployment-configs/deploy-compose.yaml +``` + +**Access:** +- Gateway: http://localhost:4444 +- Admin UI: http://localhost:4444/admin +- Plugin (exposed): http://localhost:8000 + +--- + +### Example 2: Docker Compose (With mTLS) + +**File:** `examples/deployment-configs/deploy-compose.mtls.yaml` + +Secure local deployment with mutual TLS: + +```yaml +deployment: + type: compose + project_name: mcp-stack-test + +gateway: + image: mcpgateway/mcpgateway:latest + port: 4444 + host_port: 4444 + + mtls_enabled: true # ← Enable mTLS client + mtls_verify: true + mtls_check_hostname: false # Don't verify hostname for localhost + +plugins: + - name: OPAPluginFilter + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + + mtls_enabled: true # ← Enable mTLS server + + plugin_overrides: + priority: 10 + mode: "enforce" + +certificates: + validity_days: 825 + auto_generate: true # Auto-generate mTLS certs +``` + +**Use case:** Local testing with production-like security + +**Deploy:** +```bash +# Certificates are auto-generated during deploy +cforge gateway deploy examples/deployment-configs/deploy-compose.mtls.yaml +``` + +**How mTLS works:** +1. `cforge gateway certs` generates CA + gateway client cert + plugin server certs +2. Gateway connects to plugins using client certificate +3. Plugins verify gateway's client certificate against CA +4. All communication is encrypted and mutually authenticated + +--- + +### Example 3: Kubernetes (Pre-built Images) + +**File:** `examples/deployment-configs/deploy-k8s.yaml` + +Production-ready Kubernetes deployment using pre-built images: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-prod + +gateway: + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + replicas: 2 # High availability + service_type: LoadBalancer + service_port: 4444 + + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + mtls_enabled: true + +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + replicas: 2 + service_type: ClusterIP + + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + mtls_enabled: true + + plugin_overrides: + priority: 10 + mode: "enforce" + +infrastructure: + postgres: + enabled: true + storage_size: 20Gi + storage_class: fast-ssd + redis: + enabled: true + +certificates: + auto_generate: true +``` + +**Use case:** Production deployment with HA and resource limits + +**Deploy:** +```bash +# Deploy to Kubernetes +cforge gateway deploy examples/deployment-configs/deploy-k8s.yaml + +# Verify +kubectl get all -n mcp-gateway-prod + +# Check logs +kubectl logs -n mcp-gateway-prod -l app=mcp-gateway +``` + +--- + +### Example 4: Kubernetes (Build from Source) + +Building plugins from Git repositories in Kubernetes: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-dev + +gateway: + image: mcpgateway/mcpgateway:latest + +plugins: + - name: OPAPluginFilter + # Build from source + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Push to registry (configure with env vars) + # See DOCKER_REGISTRY in deploy process + + replicas: 1 + mtls_enabled: true + +certificates: + auto_generate: true +``` + +**Deploy:** +```bash +# Build locally and push to registry +export DOCKER_REGISTRY=myregistry.io/myorg +cforge gateway build deploy-k8s-build.yaml + +# Deploy to Kubernetes +cforge gateway deploy deploy-k8s-build.yaml --skip-build +``` + +--- + +### Example 5: Kubernetes with cert-manager + +**File:** `examples/deployment-configs/deploy-k8s-cert-manager.yaml` + +Production deployment using cert-manager for automated certificate management: + +```yaml +deployment: + type: kubernetes + namespace: mcp-gateway-test + +gateway: + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + service_type: ClusterIP + service_port: 4444 + + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + env_vars: + LOG_LEVEL: DEBUG + MCPGATEWAY_UI_ENABLED: "true" + + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + service_type: ClusterIP + + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + + mtls_enabled: true + + plugin_overrides: + priority: 10 + mode: "enforce" + +# cert-manager configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # Reference the Issuer created in prerequisites + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer + + # Certificate validity (auto-renewed at 2/3 of lifetime) + validity_days: 825 + + # Local paths not used when use_cert_manager=true + auto_generate: false +``` + +**Prerequisites:** + +1. Install cert-manager: + ```bash + kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml + ``` + +2. Create namespace and CA Issuer (one-time setup): + ```bash + # Create namespace first + kubectl create namespace mcp-gateway-test + + # Apply CA Issuer + kubectl apply -f examples/deployment-configs/cert-manager-issuer-example.yaml + ``` + +**Deploy:** +```bash +# Deploy (no need to generate certificates manually) +cforge gateway deploy examples/deployment-configs/deploy-k8s-cert-manager.yaml + +# Verify cert-manager created certificates +kubectl get certificates -n mcp-gateway-test +kubectl get secrets -n mcp-gateway-test | grep mcp- +``` + +**How it works:** +1. `cforge gateway deploy` skips local certificate generation +2. Generates cert-manager Certificate CRDs for gateway and plugins +3. Applies Certificate CRDs to Kubernetes +4. cert-manager automatically creates TLS secrets +5. Pods use the secrets created by cert-manager +6. cert-manager auto-renews certificates before expiry + +**Certificate lifecycle:** +- **Creation**: cert-manager generates certificates when CRDs are applied +- **Renewal**: Automatic renewal at 2/3 of lifetime (550 days for 825-day cert) +- **Deletion**: Certificates deleted when stack is destroyed, Issuer remains + +--- + +## mTLS Configuration Guide + +### Understanding mTLS in MCP Gateway + +**mTLS (Mutual TLS)** provides: +- **Encryption**: All gateway ↔ plugin traffic is encrypted +- **Authentication**: Both parties prove their identity +- **Authorization**: Only trusted certificates can communicate + +### Certificate Hierarchy + +``` +CA (Root Certificate Authority) +├── Gateway Client Certificate +│ └── Used by gateway to connect to plugins +└── Plugin Server Certificates (one per plugin) + └── Used by plugins to authenticate gateway +``` + +### Enabling mTLS + +**In your configuration:** + +```yaml +gateway: + mtls_enabled: true # Enable mTLS client + mtls_verify: true # Verify server certificates + mtls_check_hostname: false # Skip hostname verification (for localhost/IPs) + +plugins: + - name: MyPlugin + mtls_enabled: true # Enable mTLS server +``` + +### Certificate Generation + +**Automatic (recommended):** +```yaml +certificates: + auto_generate: true # Auto-generate during deploy + validity_days: 825 # ~2.3 years +``` + +**Manual:** +```bash +# Generate certificates explicitly +cforge gateway certs deploy.yaml + +# Certificates are created in: +# - certs/mcp/ca/ (CA) +# - certs/mcp/gateway/ (gateway client cert) +# - certs/mcp/plugins/*/ (plugin server certs) +``` + +### Environment Variables + +The deployment tool automatically sets these environment variables: + +**Gateway (client):** +```bash +PLUGINS_CLIENT_MTLS_CERTFILE=/certs/gateway/client.crt +PLUGINS_CLIENT_MTLS_KEYFILE=/certs/gateway/client.key +PLUGINS_CLIENT_MTLS_CA_BUNDLE=/certs/gateway/ca.crt +PLUGINS_CLIENT_MTLS_VERIFY=true +PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME=false +``` + +**Plugin (server):** +```bash +PLUGINS_SERVER_SSL_CERTFILE=/certs/server.crt +PLUGINS_SERVER_SSL_KEYFILE=/certs/server.key +PLUGINS_SERVER_SSL_CA_CERTS=/certs/ca.crt +PLUGINS_SERVER_SSL_CERT_REQS=2 # CERT_REQUIRED +``` + +### Troubleshooting mTLS + +**Problem: Certificate verification fails** + +Check certificate validity: +```bash +openssl x509 -in certs/mcp/gateway/client.crt -noout -dates +openssl x509 -in certs/mcp/plugins/MyPlugin/server.crt -noout -dates +``` + +**Problem: Hostname mismatch errors** + +Solution: Set `mtls_check_hostname: false` in gateway config, or use service DNS names + +**Problem: Connection refused** + +- Verify plugin has `mtls_enabled: true` +- Check plugin logs for certificate errors +- Ensure certificates are mounted correctly + +**Problem: Expired certificates** + +Regenerate: +```bash +rm -rf certs/ +cforge gateway certs deploy.yaml +``` + +Then redeploy to distribute new certificates. + +--- + +## Container Registry Integration + +### Overview + +The container registry feature allows you to build images locally and automatically push them to container registries (Docker Hub, Quay.io, OpenShift internal registry, private registries, etc.). This is essential for: + +✅ **Kubernetes/OpenShift deployments** - Avoid ImagePullBackOff errors +✅ **Team collaboration** - Share images across developers and environments +✅ **CI/CD pipelines** - Build once, deploy everywhere +✅ **Production deployments** - Use trusted registry sources + +### How It Works + +1. **Build**: Images are built locally using docker/podman +2. **Tag**: Images are automatically tagged with the registry path +3. **Push**: Images are pushed to the registry (if `push: true`) +4. **Deploy**: Kubernetes manifests reference the registry images + +### Configuration + +Add a `registry` section to your gateway and/or plugin configurations: + +```yaml +gateway: + repo: https://github.com/yourorg/yourrepo.git + + # Container registry configuration + registry: + enabled: true # Enable registry integration + url: registry.example.com # Registry URL + namespace: myproject # Registry namespace/org/project + push: true # Push after build (default: true) + image_pull_policy: IfNotPresent # Kubernetes imagePullPolicy +``` + +**Configuration Fields:** + +| Field | Required | Description | Example | +|-------|----------|-------------|---------| +| `enabled` | Yes | Enable registry push | `true` | +| `url` | Yes* | Registry URL | `docker.io`, `quay.io`, `registry.mycompany.com` | +| `namespace` | Yes* | Registry namespace/organization/project | `myusername`, `myorg`, `mcp-gateway-test` | +| `push` | No | Push image after build | `true` (default) | +| `image_pull_policy` | No | Kubernetes imagePullPolicy | `IfNotPresent` (default) | + +\* Required when `enabled: true` + +### Common Registry Examples + +#### Docker Hub + +```yaml +registry: + enabled: true + url: docker.io + namespace: myusername + push: true + image_pull_policy: IfNotPresent +``` + +**Authentication:** +```bash +docker login +``` + +#### Quay.io + +```yaml +registry: + enabled: true + url: quay.io + namespace: myorganization + push: true + image_pull_policy: IfNotPresent +``` + +**Authentication:** +```bash +podman login quay.io +``` + +#### OpenShift Internal Registry + +```yaml +registry: + enabled: true + url: default-route-openshift-image-registry.apps-crc.testing + namespace: mcp-gateway-test + push: true + image_pull_policy: Always +``` + +**Authentication:** +```bash +# OpenShift Local (CRC) +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) + +# OpenShift on cloud +oc registry login +``` + +#### Private Registry + +```yaml +registry: + enabled: true + url: registry.mycompany.com + namespace: devteam + push: true + image_pull_policy: IfNotPresent +``` + +**Authentication:** +```bash +podman login registry.mycompany.com -u myusername +``` + +### Image Naming + +When registry is enabled, images are automatically tagged with the full registry path: + +**Local tag (without registry):** +``` +mcpgateway-gateway:latest +mcpgateway-opapluginfilter:latest +``` + +**Registry tag (with registry enabled):** +``` +registry.example.com/myproject/mcpgateway-gateway:latest +registry.example.com/myproject/mcpgateway-opapluginfilter:latest +``` + +### Image Pull Policies + +Choose the appropriate policy for your use case: + +| Policy | Description | Best For | +|--------|-------------|----------| +| `Always` | Pull image every time pod starts | Development, testing latest changes | +| `IfNotPresent` | Pull only if image doesn't exist locally | Production, stable releases | +| `Never` | Never pull, only use local images | Air-gapped environments | + +### Workflow Example + +#### OpenShift Local Deployment + +```bash +# 1. Authenticate to OpenShift registry +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) + +# 2. Build and push images +cforge gateway deploy examples/deployment-configs/deploy-openshift-local-registry.yaml + +# The tool will: +# - Build images locally +# - Tag with registry paths +# - Push to OpenShift internal registry +# - Generate manifests with registry image references +# - Deploy to cluster + +# 3. Verify images were pushed +oc get imagestreams -n mcp-gateway-test + +# Output: +# NAME IMAGE REPOSITORY +# mcpgateway-gateway default-route-.../mcp-gateway-test/mcpgateway-gateway +# mcpgateway-opapluginfilter default-route-.../mcp-gateway-test/mcpgateway-opapluginfilter +``` + +#### CI/CD Pipeline Example + +```bash +# In your CI/CD pipeline: + +# 1. Authenticate to registry +echo "$REGISTRY_PASSWORD" | docker login $REGISTRY_URL -u $REGISTRY_USER --password-stdin + +# 2. Build and push +cforge gateway build deploy-prod.yaml + +# 3. Images are automatically pushed to registry + +# 4. Deploy to Kubernetes (manifests already reference registry images) +cforge gateway deploy deploy-prod.yaml --skip-build --skip-certs +``` + +### Per-Component Configuration + +Each component (gateway and plugins) can have different registry settings: + +```yaml +gateway: + repo: https://github.com/myorg/gateway.git + registry: + enabled: true + url: quay.io + namespace: myorg + push: true + +plugins: + - name: MyPlugin + repo: https://github.com/myorg/plugin.git + registry: + enabled: true + url: docker.io # Different registry + namespace: myusername # Different namespace + push: true + + - name: InternalPlugin + repo: https://github.com/myorg/internal-plugin.git + # No registry - use local image only + registry: + enabled: false +``` + +This allows you to: +- Push gateway to one registry, plugins to another +- Skip registry push for some components +- Use different namespaces per component +- Mix local and registry images + +### Tag-Only Mode + +To tag images without pushing (useful for testing): + +```yaml +registry: + enabled: true + url: registry.example.com + namespace: myproject + push: false # Tag but don't push +``` + +**Use cases:** +- Test registry configuration before pushing +- Generate manifests with registry paths for GitOps +- Manual push workflow + +### Troubleshooting + +#### Authentication Errors + +**Error:** `Failed to push to registry: unauthorized` + +**Solution:** Authenticate to the registry before building: +```bash +# Docker Hub +docker login + +# Quay.io +podman login quay.io + +# Private registry +podman login registry.mycompany.com -u myusername + +# OpenShift +podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +``` + +#### ImagePullBackOff in Kubernetes + +**Error:** Pods show `ImagePullBackOff` status + +**Possible causes:** +1. Image doesn't exist in registry (push failed) +2. Registry authentication not configured in Kubernetes +3. Network connectivity issues +4. Wrong image path/tag + +**Solutions:** + +**1. Verify image exists:** +```bash +# OpenShift +oc get imagestreams -n mcp-gateway-test + +# Docker Hub/Quay +podman search your-registry.com/namespace/image-name +``` + +**2. Configure Kubernetes pull secrets:** +```bash +# Create docker-registry secret +kubectl create secret docker-registry regcred \ + --docker-server=registry.example.com \ + --docker-username=myusername \ + --docker-password=mypassword \ + --docker-email=myemail@example.com \ + -n mcp-gateway-test + +# Update deployment to use secret (manual step, or add to template) +``` + +**3. For OpenShift, grant pull permissions:** +```bash +# Allow default service account to pull from namespace +oc policy add-role-to-user system:image-puller \ + system:serviceaccount:mcp-gateway-test:default \ + -n mcp-gateway-test +``` + +#### Push Failed: Too Large + +**Error:** `image push failed: blob upload exceeds max size` + +**Solution:** Some registries have size limits. Options: +1. Use multi-stage builds to reduce image size +2. Switch to a registry with larger limits +3. Split into smaller images + +#### Podman Trying HTTP Instead of HTTPS (OpenShift/CRC) + +**Error:** `pinging container registry ...: Get "http://...: dial tcp 127.0.0.1:80: connection refused` + +**Cause:** Podman doesn't know the registry uses HTTPS and defaults to HTTP on port 80. + +**Solution:** Configure podman to use HTTPS for the registry: + +```bash +# SSH into podman machine and configure registries.conf +podman machine ssh -- "sudo bash -c ' +if ! grep -q \"default-route-openshift-image-registry.apps-crc.testing\" /etc/containers/registries.conf 2>/dev/null; then + echo \"\" >> /etc/containers/registries.conf + echo \"[[registry]]\" >> /etc/containers/registries.conf + echo \"location = \\\"default-route-openshift-image-registry.apps-crc.testing\\\"\" >> /etc/containers/registries.conf + echo \"insecure = true\" >> /etc/containers/registries.conf + echo \"Registry configuration added\" +else + echo \"Registry already configured\" +fi +'" + +# Restart podman machine +podman machine restart + +# Wait for restart +sleep 10 + +# Verify you can now push +podman push default-route-openshift-image-registry.apps-crc.testing/namespace/image:tag +``` + +**Alternative solution:** Use the internal registry service name instead of the route: + +```yaml +registry: + url: image-registry.openshift-image-registry.svc:5000 + namespace: mcp-gateway-test +``` + +This bypasses the external route and connects directly to the internal service (HTTPS on port 5000). + +#### Registry URL Format + +**Correct formats:** +```yaml +url: docker.io # Docker Hub +url: quay.io # Quay.io +url: gcr.io # Google Container Registry +url: registry.mycompany.com # Private registry +url: default-route-openshift-image-registry.apps-crc.testing # OpenShift +``` + +**Incorrect formats:** +```yaml +url: https://docker.io # No protocol +url: docker.io/myusername # No namespace in URL +url: registry:5000 # Include port in URL, not namespace +``` + +### Best Practices + +✅ **DO:** +- Authenticate to registry before building +- Use specific version tags in production (not `:latest`) +- Test registry configuration with `push: false` first +- Use `image_pull_policy: Always` for development +- Use `image_pull_policy: IfNotPresent` for production +- Organize images by namespace/project + +❌ **DON'T:** +- Commit registry credentials to Git +- Use `latest` tag in production +- Mix local and registry images without testing +- Skip authentication step +- Use `push: true` for testing without verifying first + +### Example Configurations + +Full examples available in: +- `examples/deployment-configs/deploy-openshift-local.yaml` - Registry config commented +- `examples/deployment-configs/deploy-openshift-local-registry.yaml` - Full registry setup + +--- + +## Deployment Modes + +### Plain Python Mode (Default) + +**What is it?** +Pure Python implementation using standard tools (`docker`, `kubectl`, `git`, etc.). This is the **default mode** to avoid automatic downloads. + +**When to use:** +- ✅ Default choice (no surprises) +- ✅ Environments without Dagger support +- ✅ Air-gapped networks +- ✅ Simple deployments +- ✅ Debugging/troubleshooting + +**Requirements:** +- Python 3.11+ +- Docker CLI +- `kubectl` (for Kubernetes deployments) +- `git` (for building from source) + +**Usage:** +```bash +# Plain Python mode (default, no flag needed) +cforge gateway deploy deploy.yaml +``` + +**Characteristics:** +- Sequential builds +- Standard caching +- No external dependencies beyond Docker/kubectl + +--- + +### Dagger Mode (Opt-in) + +**What is Dagger?** +Dagger is a programmable CI/CD engine that runs pipelines in containers. It provides: +- **Reproducible builds**: Same results everywhere +- **Parallel execution**: Faster builds +- **Intelligent caching**: Only rebuild what changed +- **Cross-platform**: Works on any system with Docker + +**When to use:** +- ✅ Local development (fastest builds) +- ✅ CI/CD pipelines (GitHub Actions, GitLab CI, etc.) +- ✅ Team environments (consistent results) +- ✅ When you want optimized build performance + +**Requirements:** +- Docker or compatible container runtime +- `dagger-io` Python package (optional, installed separately) +- **Note**: First use will auto-download the Dagger CLI (~100MB) + +**Enable:** +```bash +# Install dagger-io package first +pip install dagger-io + +# Use Dagger mode (opt-in with --dagger flag) +cforge gateway --dagger deploy deploy.yaml +``` + +**Performance benefits:** +- 2-3x faster builds with caching +- Parallel plugin builds +- Efficient layer reuse + +**Important**: Using `--dagger` will automatically download the Dagger CLI binary on first use if not already present. Use plain Python mode if you want to avoid automatic downloads + +--- + +## CI/CD Integration + +### GitHub Actions + +```yaml +name: Deploy MCP Gateway + +on: + push: + branches: [main] + +jobs: + deploy: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install cforge + run: pip install -e . + + - name: Validate configuration + run: cforge gateway validate deploy/deploy-prod.yaml + + - name: Build containers + run: cforge gateway build deploy/deploy-prod.yaml + env: + DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }} + + - name: Generate certificates + run: cforge gateway certs deploy/deploy-prod.yaml + + - name: Deploy to Kubernetes + run: cforge gateway deploy deploy/deploy-prod.yaml --skip-build + env: + KUBECONFIG: ${{ secrets.KUBECONFIG }} + + - name: Verify deployment + run: cforge gateway verify deploy/deploy-prod.yaml +``` + +--- + +### GitLab CI + +```yaml +stages: + - validate + - build + - deploy + +variables: + CONFIG_FILE: deploy/deploy-prod.yaml + +validate: + stage: validate + script: + - pip install -e . + - cforge gateway validate $CONFIG_FILE + +build: + stage: build + script: + - pip install -e . + - cforge gateway build $CONFIG_FILE + artifacts: + paths: + - deploy/ + +deploy: + stage: deploy + script: + - pip install -e . + - cforge gateway deploy $CONFIG_FILE --skip-build + environment: + name: production + only: + - main +``` + +--- + +## Best Practices + +### Configuration Management + +✅ **DO:** +- Version control your `deploy.yaml` +- Use Git tags/branches for plugin versions (`ref: v1.2.3`) +- Separate configs for dev/staging/prod +- Document custom `env_vars` in comments + +❌ **DON'T:** +- Hardcode secrets in YAML (use environment files) +- Use `ref: main` in production (pin versions) +- Commit generated certificates to Git + +### Environment Variables + +✅ **DO:** +```bash +# Review and customize .env files after build +cforge gateway build deploy.yaml +# Edit deploy/env/.env.gateway +# Edit deploy/env/.env.PluginName +cforge gateway deploy deploy.yaml --skip-build +``` + +❌ **DON'T:** +```bash +# Deploy without reviewing environment +cforge gateway deploy deploy.yaml # May use default/insecure values +``` + +### Certificate Management + +✅ **DO:** +- Let `cforge` auto-generate certificates +- Rotate certificates before expiry +- Use separate CAs for dev/staging/prod +- Backup CA private key securely + +❌ **DON'T:** +- Share certificates between environments +- Commit CA private key to Git +- Use expired certificates + +### Resource Limits + +✅ **DO:** +```yaml +gateway: + memory_request: 256Mi + memory_limit: 512Mi # 2x request for burst capacity + cpu_request: 100m + cpu_limit: 500m # Allow bursting +``` + +❌ **DON'T:** +```yaml +gateway: + # Missing resource limits = unbounded usage + # OR + memory_limit: 256Mi # Too tight, may OOM +``` + +### High Availability + +✅ **DO:** +```yaml +gateway: + replicas: 2 # Multiple replicas + service_type: LoadBalancer + +plugins: + - name: CriticalPlugin + replicas: 2 # HA for critical plugins +``` + +❌ **DON'T:** +```yaml +gateway: + replicas: 1 # Single point of failure in production +``` + +--- + +## Troubleshooting + +### Build Issues + +**Problem: Git clone fails** +``` +Error: Failed to clone repository +``` + +**Solution:** +- Check `repo` URL is correct +- Verify Git credentials/SSH keys +- Ensure network connectivity +- For private repos, configure Git auth + +--- + +**Problem: Docker build fails** +``` +Error: Build failed for plugin MyPlugin +``` + +**Solution:** +1. Check `context` and `containerfile` paths +2. Verify Containerfile syntax +3. Review plugin repository structure +4. Try building manually: + ```bash + git clone + cd + docker build -f . + ``` + +--- + +### Deployment Issues + +**Problem: Pod/container fails to start** +``` +Error: CrashLoopBackOff +``` + +**Solution:** +1. Check logs: + ```bash + # Kubernetes + kubectl logs -n + + # Docker Compose + docker-compose -f deploy/docker-compose.yaml logs + ``` +2. Verify environment variables in `deploy/env/` +3. Check resource limits (may be too low) +4. Verify image was built/pulled correctly + +--- + +**Problem: mTLS connection fails** +``` +Error: SSL certificate verification failed +``` + +**Solution:** +1. Regenerate certificates: + ```bash + rm -rf certs/ + cforge gateway certs deploy.yaml + ``` +2. Redeploy to distribute new certs: + ```bash + cforge gateway deploy deploy.yaml --skip-build --skip-certs + ``` +3. Check certificate expiry: + ```bash + openssl x509 -in certs/mcp/gateway/client.crt -noout -dates + ``` + +--- + +### Verification Issues + +**Problem: Deployment verification timeout** +``` +Error: Verification failed: timeout waiting for deployment +``` + +**Solution:** +1. Increase timeout: + ```bash + cforge gateway verify deploy.yaml --timeout 600 + ``` +2. Check pod/container status manually +3. Review resource availability (CPU/memory) +4. Check for image pull errors + +--- + +## FAQ + +**Q: Can I use pre-built images instead of building from source?** + +A: Yes! Just specify `image` instead of `repo`: +```yaml +plugins: + - name: MyPlugin + image: myorg/myplugin:v1.0.0 +``` + +--- + +**Q: How do I update a plugin to a new version?** + +A: Update the `ref` and redeploy: +```yaml +plugins: + - name: MyPlugin + repo: https://github.com/org/repo.git + ref: v2.0.0 # ← Update version +``` + +Then: +```bash +cforge gateway build deploy.yaml --plugin MyPlugin --no-cache +cforge gateway deploy deploy.yaml --skip-certs +``` + +--- + +**Q: Can I deploy only the gateway without plugins?** + +A: Yes, just omit the `plugins` section or use an empty array: +```yaml +plugins: [] +``` + +--- + +**Q: How do I add custom environment variables?** + +A: Two ways: + +**1. In YAML (committed to Git):** +```yaml +gateway: + env_vars: + CUSTOM_VAR: value +``` + +**2. In .env file (not committed):** +```bash +# deploy/env/.env.gateway +CUSTOM_VAR=value +``` + +--- + +**Q: Can I use cforge in a CI/CD pipeline?** + +A: Absolutely! See [CI/CD Integration](#cicd-integration) section above. + +--- + +**Q: How do I switch between Dagger and plain Python modes?** + +A: +```bash +# Plain Python mode (default) +cforge gateway deploy deploy.yaml + +# Dagger mode (opt-in, requires dagger-io package) +cforge gateway --dagger deploy deploy.yaml +``` + +**Note**: Dagger mode requires installing the `dagger-io` package and will auto-download the Dagger CLI (~100MB) on first use + +--- + +**Q: Where are the generated manifests stored?** + +A: Default: `deploy/` directory +- `deploy/docker-compose.yaml` (Compose mode) +- `deploy/manifests/` (Kubernetes mode) + +Custom location: +```bash +cforge gateway deploy deploy.yaml --output-dir ./my-deploy +``` + +--- + +**Q: How do I access the gateway after deployment?** + +A: +- **Docker Compose**: `http://localhost:` (default: 4444) +- **Kubernetes LoadBalancer**: Get external IP: + ```bash + kubectl get svc -n mcp-gateway + ``` +- **Kubernetes ClusterIP**: Port-forward: + ```bash + kubectl port-forward -n svc/mcp-gateway 4444:4444 + ``` + +--- + +## Additional Resources + +- **Main Documentation**: [ContextForge Documentation](/) +- **Plugin Development**: [Plugin Framework Guide](/plugins/framework) +- **mTLS Setup**: [mTLS Configuration Guide](/using/plugins/mtls) +- **Example Configs**: [`examples/deployment-configs/`](https://github.com/terylt/mcp-context-forge/tree/main/examples/deployment-configs) +- **Source Code**: [`mcpgateway/tools/builder/`](https://github.com/terylt/mcp-context-forge/tree/main/mcpgateway/tools/builder) + +--- + +## Getting Help + +If you encounter issues: + +1. **Check logs**: Review detailed error messages +2. **Validate config**: Run `cforge gateway validate deploy.yaml` +3. **Dry-run**: Test with `cforge gateway deploy deploy.yaml --dry-run` +4. **Verbose mode**: Use `cforge gateway -v ` for detailed output +5. **Debug mode**: Set `export MCP_DEBUG=1` for stack traces +6. **GitHub Issues**: [Report bugs and request features](https://github.com/terylt/mcp-context-forge/issues) + +--- diff --git a/docs/docs/index.md b/docs/docs/index.md index c03974356..d8e7e93d5 100644 --- a/docs/docs/index.md +++ b/docs/docs/index.md @@ -1435,6 +1435,12 @@ MCP Gateway uses Alembic for database migrations. Common commands: | ------------------------------ | ------------------------------------------------ | --------------------- | ------- | | `PLUGINS_ENABLED` | Enable the plugin framework | `false` | bool | | `PLUGIN_CONFIG_FILE` | Path to main plugin configuration file | `plugins/config.yaml` | string | +| `PLUGINS_MTLS_CA_BUNDLE` | (Optional) default CA bundle for external plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_CERT` | (Optional) gateway client certificate for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY` | (Optional) gateway client key for plugin mTLS | _(empty)_ | string | +| `PLUGINS_MTLS_CLIENT_KEY_PASSWORD` | (Optional) password for plugin client key | _(empty)_ | string | +| `PLUGINS_MTLS_VERIFY` | (Optional) verify remote plugin certificates (`true`/`false`) | `true` | bool | +| `PLUGINS_MTLS_CHECK_HOSTNAME` | (Optional) enforce hostname verification for plugins | `true` | bool | | `PLUGINS_CLI_COMPLETION` | Enable auto-completion for plugins CLI | `false` | bool | | `PLUGINS_CLI_MARKUP_MODE` | Set markup mode for plugins CLI | (none) | `rich`, `markdown`, `disabled` | diff --git a/docs/docs/manage/mtls.md b/docs/docs/manage/mtls.md new file mode 100644 index 000000000..f6330a177 --- /dev/null +++ b/docs/docs/manage/mtls.md @@ -0,0 +1,928 @@ +# mTLS (Mutual TLS) Configuration + +Configure mutual TLS authentication for MCP Gateway to enable certificate-based client authentication and enhanced security. + +## Overview + +Mutual TLS (mTLS) provides bidirectional authentication between clients and servers using X.509 certificates. While native mTLS support is in development ([#568](https://github.com/IBM/mcp-context-forge/issues/568)), MCP Gateway can leverage reverse proxies for production-ready mTLS today. + +## Current Status + +- **Native mTLS**: 🚧 In Progress - tracked in [#568](https://github.com/IBM/mcp-context-forge/issues/568) +- **Proxy-based mTLS**: ✅ Available - using Nginx, Caddy, or other reverse proxies +- **Container Support**: ✅ Ready - lightweight containers support proxy deployment + +## Architecture + +```mermaid +sequenceDiagram + participant Client + participant Proxy as Reverse Proxy
(Nginx/Caddy) + participant Gateway as MCP Gateway + participant MCP as MCP Server + + Client->>Proxy: TLS Handshake
+ Client Certificate + Proxy->>Proxy: Verify Client Cert + Proxy->>Gateway: HTTP + X-SSL Headers + Gateway->>Gateway: Extract User from Headers + Gateway->>MCP: Forward Request + MCP-->>Gateway: Response + Gateway-->>Proxy: Response + Proxy-->>Client: TLS Response +``` + +## Quick Start + +### Option 1: Docker Compose with Nginx mTLS + +1. **Generate certificates** (for testing): + +```bash +# Create certificates directory +mkdir -p certs/mtls + +# Generate CA certificate +openssl req -x509 -newkey rsa:4096 -days 365 -nodes \ + -keyout certs/mtls/ca.key -out certs/mtls/ca.crt \ + -subj "/C=US/ST=State/L=City/O=MCP-CA/CN=MCP Root CA" + +# Generate server certificate +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/mtls/server.key -out certs/mtls/server.csr \ + -subj "/CN=gateway.local" + +openssl x509 -req -in certs/mtls/server.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/mtls/server.crt -days 365 + +# Generate client certificate +openssl req -newkey rsa:4096 -nodes \ + -keyout certs/mtls/client.key -out certs/mtls/client.csr \ + -subj "/CN=admin@example.com" + +openssl x509 -req -in certs/mtls/client.csr \ + -CA certs/mtls/ca.crt -CAkey certs/mtls/ca.key \ + -CAcreateserial -out certs/mtls/client.crt -days 365 + +# Create client bundle for testing +cat certs/mtls/client.crt certs/mtls/client.key > certs/mtls/client.pem +``` + +2. **Create Nginx configuration** (`nginx-mtls.conf`): + +```nginx +events { + worker_connections 1024; +} + +http { + upstream mcp_gateway { + server gateway:4444; + } + + server { + listen 443 ssl; + server_name gateway.local; + + # Server certificates + ssl_certificate /etc/nginx/certs/server.crt; + ssl_certificate_key /etc/nginx/certs/server.key; + + # mTLS client verification + ssl_client_certificate /etc/nginx/certs/ca.crt; + ssl_verify_client on; + ssl_verify_depth 2; + + # Strong TLS settings + ssl_protocols TLSv1.2 TLSv1.3; + ssl_ciphers HIGH:!aNULL:!MD5; + ssl_prefer_server_ciphers on; + + location / { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + + # Pass client certificate info to MCP Gateway + proxy_set_header X-SSL-Client-Cert $ssl_client_escaped_cert; + proxy_set_header X-SSL-Client-S-DN $ssl_client_s_dn; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-SSL-Client-Verify $ssl_client_verify; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + + # Standard proxy headers + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + } + + # WebSocket support + location /ws { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + } + + # SSE support + location ~ ^/servers/.*/sse$ { + proxy_pass http://mcp_gateway; + proxy_http_version 1.1; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + proxy_set_header Connection ""; + proxy_buffering off; + proxy_cache off; + } + } +} +``` + +3. **Create Docker Compose file** (`docker-compose-mtls.yml`): + +```yaml +version: '3.8' + +services: + nginx-mtls: + image: nginx:alpine + ports: + - "443:443" + volumes: + - ./nginx-mtls.conf:/etc/nginx/nginx.conf:ro + - ./certs/mtls:/etc/nginx/certs:ro + networks: + - mcpnet + depends_on: + - gateway + + gateway: + image: ghcr.io/ibm/mcp-context-forge:latest + environment: + - HOST=0.0.0.0 + - PORT=4444 + - DATABASE_URL=sqlite:////app/data/mcp.db + + # Disable JWT auth and trust proxy headers + - MCP_CLIENT_AUTH_ENABLED=false + - TRUST_PROXY_AUTH=true + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN + + # Keep admin UI protected + - AUTH_REQUIRED=true + - BASIC_AUTH_USER=admin + - BASIC_AUTH_PASSWORD=changeme + + # Enable admin features + - MCPGATEWAY_UI_ENABLED=true + - MCPGATEWAY_ADMIN_API_ENABLED=true + networks: + - mcpnet + volumes: + - ./data:/app/data # persists SQLite database at /app/data/mcp.db + +networks: + mcpnet: + driver: bridge +``` +> 💾 Run `mkdir -p data` before `docker-compose up` so the SQLite database survives restarts. + + +4. **Test the connection**: + +```bash +# Start the services +docker-compose -f docker-compose-mtls.yml up -d + +# Test with client certificate +curl --cert certs/mtls/client.pem \ + --cacert certs/mtls/ca.crt \ + https://localhost/health + +# Test without certificate (should fail) +curl https://localhost/health +# Error: SSL certificate problem +``` + +### Option 2: Caddy with mTLS + +1. **Create Caddyfile** (`Caddyfile.mtls`): + +```caddyfile +{ + # Global options + debug +} + +gateway.local { + # Enable mTLS + tls { + client_auth { + mode require_and_verify + trusted_ca_cert_file /etc/caddy/certs/ca.crt + } + } + + # Reverse proxy to MCP Gateway + reverse_proxy gateway:4444 { + # Pass certificate info as headers + header_up X-SSL-Client-Cert {http.request.tls.client.certificate_pem_escaped} + header_up X-SSL-Client-S-DN {http.request.tls.client.subject} + header_up X-SSL-Client-S-DN-CN {http.request.tls.client.subject_cn} + header_up X-Authenticated-User {http.request.tls.client.subject_cn} + + # WebSocket support + @websocket { + header Connection *Upgrade* + header Upgrade websocket + } + transport http { + versions 1.1 + } + } +} +``` + +2. **Docker Compose with Caddy**: + +```yaml +version: '3.8' + +services: + caddy-mtls: + image: caddy:alpine + ports: + - "443:443" + volumes: + - ./Caddyfile.mtls:/etc/caddy/Caddyfile:ro + - ./certs/mtls:/etc/caddy/certs:ro + - caddy_data:/data + - caddy_config:/config + networks: + - mcpnet + depends_on: + - gateway + + gateway: + # Same configuration as Nginx example + image: ghcr.io/ibm/mcp-context-forge:latest + environment: + - MCP_CLIENT_AUTH_ENABLED=false + - TRUST_PROXY_AUTH=true + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN + # ... rest of config ... + networks: + - mcpnet + +volumes: + caddy_data: + caddy_config: + +networks: + mcpnet: + driver: bridge +``` + +## Production Configuration + +### Enterprise PKI Integration + +For production deployments, integrate with your enterprise PKI: + +```nginx +# nginx.conf - Enterprise PKI +server { + listen 443 ssl; + + # Server certificates from enterprise CA + ssl_certificate /etc/pki/tls/certs/gateway.crt; + ssl_certificate_key /etc/pki/tls/private/gateway.key; + + # Client CA chain + ssl_client_certificate /etc/pki/tls/certs/enterprise-ca-chain.crt; + ssl_verify_client on; + ssl_verify_depth 3; + + # CRL verification + ssl_crl /etc/pki/tls/crl/enterprise.crl; + + # OCSP stapling + ssl_stapling on; + ssl_stapling_verify on; + ssl_trusted_certificate /etc/pki/tls/certs/enterprise-ca-chain.crt; + + location / { + proxy_pass http://mcp-gateway:4444; + + # Extract user from certificate DN + if ($ssl_client_s_dn ~ /CN=([^\/]+)/) { + set $cert_cn $1; + } + proxy_set_header X-Authenticated-User $cert_cn; + + # Extract organization + if ($ssl_client_s_dn ~ /O=([^\/]+)/) { + set $cert_org $1; + } + proxy_set_header X-User-Organization $cert_org; + } +} +``` + +### Kubernetes Deployment Options + +### Option 1: Helm Chart with TLS Ingress + +The MCP Gateway Helm chart (`charts/mcp-stack`) includes built-in TLS support via Ingress: + +```bash +# Install with TLS enabled +helm install mcp-gateway ./charts/mcp-stack \ + --set mcpContextForge.ingress.enabled=true \ + --set mcpContextForge.ingress.host=gateway.example.com \ + --set mcpContextForge.ingress.tls.enabled=true \ + --set mcpContextForge.ingress.tls.secretName=gateway-tls \ + --set mcpContextForge.ingress.annotations."cert-manager\.io/cluster-issuer"=letsencrypt-prod \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-secret"=mcp-system/gateway-client-ca \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-verify-client"=on \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-verify-depth"="2" \ + --set mcpContextForge.ingress.annotations."nginx.ingress.kubernetes.io/auth-tls-pass-certificate-to-upstream"="true" +``` + + +> ℹ️ The configuration snippet that forwards the client CN is easier to maintain in `values.yaml`; the one-liner above focuses on core flags. + +Or configure via `values.yaml`: + +```yaml +# charts/mcp-stack/values.yaml excerpt +mcpContextForge: + ingress: + enabled: true + className: nginx + host: gateway.example.com + annotations: + cert-manager.io/cluster-issuer: letsencrypt-prod + nginx.ingress.kubernetes.io/auth-tls-secret: mcp-system/gateway-client-ca + nginx.ingress.kubernetes.io/auth-tls-verify-client: "on" + nginx.ingress.kubernetes.io/auth-tls-verify-depth: "2" + nginx.ingress.kubernetes.io/auth-tls-pass-certificate-to-upstream: "true" + nginx.ingress.kubernetes.io/configuration-snippet: | + proxy_set_header X-SSL-Client-S-DN $ssl_client_s_dn; + proxy_set_header X-SSL-Client-S-DN-CN $ssl_client_s_dn_cn; + proxy_set_header X-Authenticated-User $ssl_client_s_dn_cn; + tls: + enabled: true + secretName: gateway-tls # cert-manager will generate this + + secret: + MCP_CLIENT_AUTH_ENABLED: "false" + TRUST_PROXY_AUTH: "true" + PROXY_USER_HEADER: X-SSL-Client-S-DN-CN +``` + +Create the `gateway-client-ca` secret in the same namespace as the release so the Ingress controller can validate client certificates. For example: + +```bash +kubectl create secret generic gateway-client-ca \ + --from-file=ca.crt=certs/mtls/ca.crt \ + --namespace mcp-system +``` + +### Option 2: Kubernetes with Istio mTLS + +Deploy MCP Gateway with automatic mTLS in Istio service mesh: + +```yaml +# gateway-deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: mcp-gateway + namespace: mcp-system +spec: + template: + metadata: + labels: + app: mcp-gateway + annotations: + sidecar.istio.io/inject: "true" + spec: + containers: + - name: mcp-gateway + image: ghcr.io/ibm/mcp-context-forge:latest + env: + - name: MCP_CLIENT_AUTH_ENABLED + value: "false" + - name: TRUST_PROXY_AUTH + value: "true" + - name: PROXY_USER_HEADER + value: "X-SSL-Client-S-DN-CN" +--- +# peer-authentication.yaml +apiVersion: security.istio.io/v1beta1 +kind: PeerAuthentication +metadata: + name: mcp-gateway-mtls + namespace: mcp-system +spec: + selector: + matchLabels: + app: mcp-gateway + mtls: + mode: STRICT +``` + +Istio does not add `X-SSL-Client-S-DN-CN` automatically. Use an `EnvoyFilter` to extract the client certificate common name and forward it as the header referenced by `PROXY_USER_HEADER`: + +```yaml +# envoy-filter-client-cn.yaml +apiVersion: networking.istio.io/v1alpha3 +kind: EnvoyFilter +metadata: + name: append-client-cn-header + namespace: mcp-system +spec: + workloadSelector: + labels: + app: mcp-gateway + configPatches: + - applyTo: HTTP_FILTER + match: + context: SIDECAR_INBOUND + listener: + portNumber: 4444 + filterChain: + filter: + name: envoy.filters.network.http_connection_manager + patch: + operation: INSERT_BEFORE + value: + name: envoy.filters.http.lua + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.http.lua.v3.Lua + inlineCode: | + function envoy_on_request(handle) + local ssl = handle:streamInfo():downstreamSslConnection() + if ssl ~= nil and ssl:peerCertificatePresented() then + local subject = ssl:subjectPeerCertificate() + if subject ~= nil then + local cn = subject:match("CN=([^,/]+)") + if cn ~= nil then + handle:headers():replace("X-SSL-Client-S-DN-CN", cn) + end + end + end + end + function envoy_on_response(handle) + end +``` + +The filter runs in the sidecar and ensures the gateway receives the client's common name rather than the full certificate payload. + +### HAProxy with mTLS + +```haproxy +# haproxy.cfg +global + ssl-default-bind-options ssl-min-ver TLSv1.2 + tune.ssl.default-dh-param 2048 + +frontend mcp_gateway_mtls + bind *:443 ssl crt /etc/haproxy/certs/server.pem ca-file /etc/haproxy/certs/ca.crt verify required + + # Extract certificate information + http-request set-header X-SSL-Client-Cert %[ssl_c_der,base64] + http-request set-header X-SSL-Client-S-DN %[ssl_c_s_dn] + http-request set-header X-SSL-Client-S-DN-CN %[ssl_c_s_dn(CN)] + http-request set-header X-Authenticated-User %[ssl_c_s_dn(CN)] + + default_backend mcp_gateway_backend + +backend mcp_gateway_backend + server gateway gateway:4444 check +``` + +## Certificate Management + +### Certificate Generation Scripts + +Create a script for certificate management (`generate-certs.sh`): + +```bash +#!/bin/bash +set -e + +CERT_DIR="${CERT_DIR:-./certs/mtls}" +CA_DAYS="${CA_DAYS:-3650}" +CERT_DAYS="${CERT_DAYS:-365}" +KEY_SIZE="${KEY_SIZE:-4096}" + +mkdir -p "$CERT_DIR" + +# Generate CA if it doesn't exist +if [ ! -f "$CERT_DIR/ca.crt" ]; then + echo "Generating CA certificate..." + openssl req -x509 -newkey rsa:$KEY_SIZE -days $CA_DAYS -nodes \ + -keyout "$CERT_DIR/ca.key" -out "$CERT_DIR/ca.crt" \ + -subj "/C=US/ST=State/L=City/O=Organization/CN=MCP CA" + echo "CA certificate generated." +fi + +# Function to generate certificates +generate_cert() { + local name=$1 + local cn=$2 + + if [ -f "$CERT_DIR/${name}.crt" ]; then + echo "Certificate for $name already exists, skipping..." + return + fi + + echo "Generating certificate for $name (CN=$cn)..." + + # Generate private key and CSR + openssl req -newkey rsa:$KEY_SIZE -nodes \ + -keyout "$CERT_DIR/${name}.key" -out "$CERT_DIR/${name}.csr" \ + -subj "/CN=$cn" + + # Sign with CA + openssl x509 -req -in "$CERT_DIR/${name}.csr" \ + -CA "$CERT_DIR/ca.crt" -CAkey "$CERT_DIR/ca.key" \ + -CAcreateserial -out "$CERT_DIR/${name}.crt" -days $CERT_DAYS \ + -extfile <(echo "subjectAltName=DNS:$cn") + + # Create bundle + cat "$CERT_DIR/${name}.crt" "$CERT_DIR/${name}.key" > "$CERT_DIR/${name}.pem" + + # Clean up CSR + rm "$CERT_DIR/${name}.csr" + + echo "Certificate for $name generated." +} + +# Generate server certificate +generate_cert "server" "gateway.local" + +# Generate client certificates +generate_cert "admin" "admin@example.com" +generate_cert "user1" "user1@example.com" +generate_cert "service-account" "mcp-service@example.com" + +echo "All certificates generated in $CERT_DIR" +``` + +### Certificate Rotation + +Implement automatic certificate rotation: + +```yaml +# kubernetes CronJob for cert rotation +apiVersion: batch/v1 +kind: CronJob +metadata: + name: cert-rotation + namespace: mcp-system +spec: + schedule: "0 2 * * *" # Daily at 2 AM + jobTemplate: + spec: + template: + spec: + serviceAccountName: cert-rotation + containers: + - name: cert-rotator + image: bitnami/kubectl:1.30 + command: + - /bin/sh + - -c + - | + set -euo pipefail + SECRET_NAME=${CERT_SECRET:-gateway-tls} + CERT_NAME=${CERT_NAME:-gateway-tls-cert} + NAMESPACE=${TARGET_NAMESPACE:-mcp-system} + TLS_CERT=$(kubectl get secret "$SECRET_NAME" -n "$NAMESPACE" -o jsonpath='{.data.tls\.crt}') + if [ -z "$TLS_CERT" ]; then + echo "TLS secret $SECRET_NAME missing or empty" + exit 1 + fi + echo "$TLS_CERT" | base64 -d > /tmp/current.crt + if openssl x509 -checkend 604800 -noout -in /tmp/current.crt; then + echo "Certificate valid for more than 7 days" + else + echo "Certificate expiring soon, requesting renewal" + kubectl cert-manager renew "$CERT_NAME" -n "$NAMESPACE" || echo "Install the kubectl-cert_manager plugin inside the job image to enable automatic renewal" + fi + env: + - name: CERT_SECRET + value: gateway-tls + - name: CERT_NAME + value: gateway-tls-cert + - name: TARGET_NAMESPACE + value: mcp-system + volumeMounts: + - name: tmp + mountPath: /tmp + restartPolicy: OnFailure + volumes: + - name: tmp + emptyDir: {} +``` + +Create a `ServiceAccount`, `Role`, and `RoleBinding` that grant `get` access to the TLS secret and `update` access to the related `Certificate` resource so the job can request renewals. + + +> 🔧 Install the [`kubectl-cert_manager` plugin](https://cert-manager.io/docs/reference/kubectl-plugin/) or swap the command for `cmctl renew` if you prefer Jetstack's CLI image, and ensure your job image bundles both `kubectl` and `openssl`. + +## mTLS for External MCP Plugins + +External plugins that use the `STREAMABLEHTTP` transport support mutual TLS authentication between the gateway and plugin servers. This is optional—if not configured, the gateway continues to call plugins over standard HTTP/HTTPS. Enabling mTLS lets you restrict remote plugin servers to only accept connections from gateways presenting a trusted client certificate. + +### Setup Options + +Choose the approach that best fits your deployment: + +#### **Automated Deployment (Recommended for Kubernetes)** + +For production Kubernetes deployments, use the `cforge gateway` tool with cert-manager integration for automated certificate lifecycle management: + +- **See**: [cforge gateway Deployment Guide](../deployment/cforge-gateway.md) +- **Features**: Automated certificate generation, renewal, and distribution +- **Best for**: Kubernetes production deployments, GitOps workflows + +Example deployment with cert-manager: +```yaml +# mcp-stack.yaml +deployment: + type: kubernetes + namespace: mcp-gateway-prod + +gateway: + image: mcpgateway/mcpgateway:latest + mtls_enabled: true + +plugins: + - name: OPAPluginFilter + image: mcpgateway-opapluginfilter:latest + mtls_enabled: true + +certificates: + use_cert_manager: true + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer +``` + +Deploy: +```bash +cforge gateway deploy mcp-stack.yaml +``` + +#### **Manual Setup (Local Development & Testing)** + +For local development, Docker Compose, or manual certificate management: + +- **See**: [External Plugin mTLS Setup Guide](../using/plugins/mtls.md) +- **Features**: `make` targets for certificate generation, manual configuration +- **Best for**: Local development, Docker Compose deployments, custom setups + +Quick start: +```bash +# Generate complete mTLS infrastructure +make certs-mcp-all + +# Configure plugin connection +export PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" +export PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" +export PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" +``` + +### Configuration Reference + +Both approaches support the same configuration format for plugin connections: + +**YAML Configuration** (`plugins/config.yaml`): +```yaml +plugins: + - name: "MyExternalPlugin" + kind: "external" + mcp: + proto: STREAMABLEHTTP + url: https://plugin-server:8000/mcp + tls: + ca_bundle: /app/certs/plugins/ca.crt + certfile: /app/certs/plugins/gateway-client.crt + keyfile: /app/certs/plugins/gateway-client.key + verify: true + check_hostname: true +``` + +**Environment Variables** (gateway-wide defaults): +```bash +PLUGINS_CLIENT_MTLS_CA_BUNDLE=/app/certs/ca.crt +PLUGINS_CLIENT_MTLS_CERTFILE=/app/certs/client.crt +PLUGINS_CLIENT_MTLS_KEYFILE=/app/certs/client.key +PLUGINS_CLIENT_MTLS_VERIFY=true +PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME=false +``` + +**Key Options:** +- `verify`: Validate server certificate (default: `true`, recommended for production) +- `ca_bundle`: CA certificate for server validation (omit to use system CA) +- `certfile`/`keyfile`: Client certificate and key for mTLS authentication +- `check_hostname`: Verify hostname matches certificate (default: `true`) + + +## Security Best Practices + +### 1. Certificate Validation + +```nginx +# Strict certificate validation +ssl_verify_client on; +ssl_verify_depth 2; + +# Check certificate validity +ssl_session_cache shared:SSL:10m; +ssl_session_timeout 10m; + +# Enable OCSP stapling +ssl_stapling on; +ssl_stapling_verify on; +resolver 8.8.8.8 8.8.4.4 valid=300s; +resolver_timeout 5s; +``` + +### 2. Certificate Pinning + +```python +# MCP Gateway plugin for cert pinning +class CertificatePinningPlugin: + def __init__(self): + self.pinned_certs = { + "admin@example.com": "sha256:HASH...", + "service@example.com": "sha256:HASH..." + } + + async def on_request(self, request): + cert_header = request.headers.get("X-SSL-Client-Cert") + if cert_header: + cert_hash = self.calculate_hash(cert_header) + user = request.headers.get("X-Authenticated-User") + + if user in self.pinned_certs: + if self.pinned_certs[user] != cert_hash: + raise SecurityException("Certificate pin mismatch") +``` + +### 3. Audit Logging + +Configure comprehensive audit logging for mTLS connections: + +```nginx +# nginx.conf - Audit logging +log_format mtls_audit '$remote_addr - $ssl_client_s_dn [$time_local] ' + '"$request" $status $body_bytes_sent ' + '"$http_user_agent" cert_verify:$ssl_client_verify'; + +access_log /var/log/nginx/mtls-audit.log mtls_audit; +``` + +### 4. Rate Limiting by Certificate + +```nginx +# Rate limit by certificate CN +limit_req_zone $ssl_client_s_dn_cn zone=cert_limit:10m rate=10r/s; + +location / { + limit_req zone=cert_limit burst=20 nodelay; + proxy_pass http://mcp-gateway; +} +``` + +## Monitoring & Troubleshooting + +### Health Checks + +```bash +# Check mTLS connectivity +openssl s_client -connect gateway.local:443 \ + -cert certs/mtls/client.crt \ + -key certs/mtls/client.key \ + -CAfile certs/mtls/ca.crt \ + -showcerts + +# Verify certificate +openssl x509 -in certs/mtls/client.crt -text -noout + +# Test with curl +curl -v --cert certs/mtls/client.pem \ + --cacert certs/mtls/ca.crt \ + https://gateway.local/health +``` + +### Common Issues + +| Issue | Cause | Solution | +|-------|-------|----------| +| `SSL certificate verify error` | Missing/invalid client cert | Ensure client cert is valid and signed by CA | +| `400 No required SSL certificate` | mTLS not configured | Check `ssl_verify_client on` in proxy | +| `X-Authenticated-User missing` | Header not passed | Verify proxy_set_header configuration | +| `Connection refused` | Service not running | Check docker-compose logs | +| `Certificate expired` | Cert past validity | Regenerate certificates | + +### Debug Logging + +Enable debug logging in your reverse proxy: + +```nginx +# nginx.conf +error_log /var/log/nginx/error.log debug; + +# Log SSL handshake details +ssl_session_cache shared:SSL:10m; +ssl_session_timeout 10m; +``` + +## Migration Path + +### From JWT to mTLS + +1. **Phase 1**: Deploy proxy with mTLS alongside existing JWT auth +2. **Phase 2**: Run dual-mode (both JWT and mTLS accepted) +3. **Phase 3**: Migrate all clients to certificates +4. **Phase 4**: Disable JWT, enforce mTLS only + +```yaml +# Dual-mode configuration +environment: + # Accept both methods during migration + - MCP_CLIENT_AUTH_ENABLED=true # Keep JWT active + - TRUST_PROXY_AUTH=true # Also trust proxy + - PROXY_USER_HEADER=X-SSL-Client-S-DN-CN +``` + +## Helm Chart Configuration + +The MCP Gateway Helm chart in `charts/mcp-stack/` provides extensive configuration options for TLS and security: + +### Key Security Settings in values.yaml + +```yaml +mcpContextForge: + # JWT Configuration - supports both HMAC and RSA + secret: + JWT_ALGORITHM: HS256 # or RS256 for asymmetric + JWT_SECRET_KEY: my-test-key # for HMAC algorithms + # For RSA/ECDSA, mount keys and set: + # JWT_PUBLIC_KEY_PATH: /app/certs/jwt/public.pem + # JWT_PRIVATE_KEY_PATH: /app/certs/jwt/private.pem + + # Security Headers (enabled by default) + config: + SECURITY_HEADERS_ENABLED: "true" + X_FRAME_OPTIONS: DENY + HSTS_ENABLED: "true" + HSTS_MAX_AGE: "31536000" + SECURE_COOKIES: "true" + + # Ingress with TLS + ingress: + enabled: true + tls: + enabled: true + secretName: gateway-tls +``` + +### Deploying with Helm and mTLS + +```bash +# Create namespace +kubectl create namespace mcp-gateway + +# Install with custom TLS settings +helm install mcp-gateway ./charts/mcp-stack \ + --namespace mcp-gateway \ + --set mcpContextForge.ingress.tls.enabled=true \ + --set mcpContextForge.secret.JWT_ALGORITHM=RS256 \ + --values custom-values.yaml +``` + +## Future Native mTLS Support + +When native mTLS support lands ([#568](https://github.com/IBM/mcp-context-forge/issues/568)), expect: + +- Direct TLS termination in MCP Gateway +- Certificate-based authorization policies +- Integration with enterprise PKI systems +- Built-in certificate validation and revocation checking +- Automatic certificate rotation +- Per-service certificate management + +## Related Documentation + +- [Proxy Authentication](./proxy.md) - Configuring proxy-based authentication +- [Security Features](../architecture/security-features.md) - Overall security architecture +- [Deployment Guide](../deployment/index.md) - Production deployment options +- [Authentication Overview](./securing.md) - All authentication methods diff --git a/docs/docs/using/plugins/.pages b/docs/docs/using/plugins/.pages index e655c3e3d..a1003a7ac 100644 --- a/docs/docs/using/plugins/.pages +++ b/docs/docs/using/plugins/.pages @@ -2,3 +2,4 @@ nav: - index.md - lifecycle.md - plugins.md + - mtls.md diff --git a/docs/docs/using/plugins/mtls.md b/docs/docs/using/plugins/mtls.md new file mode 100644 index 000000000..63d80fe37 --- /dev/null +++ b/docs/docs/using/plugins/mtls.md @@ -0,0 +1,500 @@ +# External Plugin mTLS Setup Guide + +This guide covers how to set up mutual TLS (mTLS) authentication between the MCP Gateway and external plugin servers. + +## Port Configuration + +**Standard port convention:** +- **Port 8000**: Main plugin service (HTTP or HTTPS/mTLS) +- **Port 9000**: Health check endpoint (automatically starts on port+1000 when mTLS is enabled) + +When mTLS is enabled, the plugin runtime automatically starts a separate HTTP-only health check server on port 9000 (configurable via `port + 1000` formula). This allows health checks without requiring mTLS client certificates. + +## Certificate Generation + +The MCP Gateway includes Makefile targets to manage the complete certificate infrastructure for plugin mTLS. + +### Quick Start + +```bash +# Generate complete mTLS infrastructure (recommended) +make certs-mcp-all + +# This automatically: +# 1. Creates a Certificate Authority (CA) +# 2. Generates gateway client certificate +# 3. Reads plugins/external/config.yaml and generates server certificates for all external plugins +``` + +**Certificate validity**: Default is **825 days** (~2.25 years) + +**Output structure**: +``` +certs/mcp/ +├── ca/ # Certificate Authority +│ ├── ca.key # CA private key (protect!) +│ └── ca.crt # CA certificate +├── gateway/ # Gateway client certificates +│ ├── client.key # Client private key +│ ├── client.crt # Client certificate +│ └── ca.crt # Copy of CA cert +└── plugins/ # Plugin server certificates + └── PluginName/ + ├── server.key # Server private key + ├── server.crt # Server certificate + └── ca.crt # Copy of CA cert +``` + +### Makefile Targets + +#### `make certs-mcp-all` + +Generate complete mTLS infrastructure. This is the **recommended** command for setting up mTLS. + +**What it does**: +1. Calls `certs-mcp-ca` to create the CA (if not exists) +2. Calls `certs-mcp-gateway` to create gateway client certificate (if not exists) +3. Reads `plugins/external/config.yaml` and generates certificates for all plugins with `kind: external` + +**Usage**: +```bash +# Use default config file (plugins/external/config.yaml) +make certs-mcp-all + +# Use custom config file +make certs-mcp-all MCP_PLUGIN_CONFIG=path/to/custom-config.yaml + +# Custom certificate validity (in days) +make certs-mcp-all MCP_CERT_DAYS=365 + +# Combine both options +make certs-mcp-all MCP_PLUGIN_CONFIG=config.yaml MCP_CERT_DAYS=730 +``` + +**Config file format** (`plugins/external/config.yaml`): +```yaml +plugins: + - name: "MyPlugin" # Certificate will be created for this plugin + kind: "external" # Must be "external" + mcp: + proto: STREAMABLEHTTP + url: http://127.0.0.1:8000/mcp + + - name: "AnotherPlugin" + kind: "external" + mcp: + proto: STREAMABLEHTTP + url: http://127.0.0.1:8001/mcp +``` + +**Fallback behavior**: If the config file doesn't exist or PyYAML is not installed, example certificates are generated for `example-plugin-a` and `example-plugin-b`. + +#### `make certs-mcp-ca` + +Generate the Certificate Authority (CA) for plugin mTLS. This is typically called automatically by other targets. + +**What it does**: +- Creates `certs/mcp/ca/ca.key` (4096-bit RSA private key) +- Creates `certs/mcp/ca/ca.crt` (CA certificate) +- Sets file permissions: `600` for `.key`, `644` for `.crt` + +**Usage**: +```bash +# Generate CA (one-time setup) +make certs-mcp-ca + +# Custom validity +make certs-mcp-ca MCP_CERT_DAYS=1825 +``` + +**Safety**: Won't overwrite existing CA. To regenerate, delete `certs/mcp/ca/` first. + +**⚠️ Warning**: The CA private key (`ca.key`) is critical. Protect it carefully! + +#### `make certs-mcp-gateway` + +Generate the gateway client certificate used by the MCP Gateway to authenticate to plugin servers. + +**What it does**: +- Depends on `certs-mcp-ca` (creates CA if needed) +- Creates `certs/mcp/gateway/client.key` (4096-bit RSA private key) +- Creates `certs/mcp/gateway/client.crt` (client certificate signed by CA) +- Copies `ca.crt` to `certs/mcp/gateway/` + +**Usage**: +```bash +# Generate gateway client certificate +make certs-mcp-gateway + +# Custom validity +make certs-mcp-gateway MCP_CERT_DAYS=365 +``` + +**Safety**: Won't overwrite existing certificate. + +#### `make certs-mcp-plugin` + +Generate a server certificate for a specific plugin. + +**What it does**: +- Depends on `certs-mcp-ca` (creates CA if needed) +- Creates `certs/mcp/plugins//server.key` +- Creates `certs/mcp/plugins//server.crt` with Subject Alternative Names (SANs): + - `DNS:` + - `DNS:mcp-plugin-` + - `DNS:localhost` +- Copies `ca.crt` to plugin directory + +**Usage**: +```bash +# Generate certificate for specific plugin +make certs-mcp-plugin PLUGIN_NAME=MyCustomPlugin + +# Custom validity +make certs-mcp-plugin PLUGIN_NAME=MyPlugin MCP_CERT_DAYS=365 +``` + +**Required**: `PLUGIN_NAME` parameter must be provided. + +**Use case**: Add a new plugin after running `certs-mcp-all`, or generate certificates manually. + +#### `make certs-mcp-check` + +Check expiry dates of all MCP certificates. + +**What it does**: +- Displays expiry dates for CA, gateway client, and all plugin certificates +- Shows remaining validity period + +**Usage**: +```bash +make certs-mcp-check +``` + +**Output example**: +``` +🔍 Checking MCP certificate expiry dates... + +📋 CA Certificate: + Expires: Jan 15 10:30:45 2027 GMT + +📋 Gateway Client Certificate: + Expires: Jan 15 10:31:22 2027 GMT + +📋 Plugin Certificates: + MyPlugin: Jan 15 10:32:10 2027 GMT + AnotherPlugin: Jan 15 10:32:45 2027 GMT +``` + +### Certificate Properties + +All certificates generated include: +- **Algorithm**: RSA with SHA-256 +- **CA Key Size**: 4096 bits +- **Client/Server Key Size**: 4096 bits +- **Default Validity**: 825 days +- **Subject Alternative Names** (plugins): DNS entries for plugin name and localhost + +### Important Notes + +1. **All `ca.crt` files are identical** - They are copies of the root CA certificate distributed to each location for convenience + +2. **Safety features** - Commands won't overwrite existing certificates. To regenerate, delete the target directory first + +3. **File permissions** - Automatically set to secure values: + - Private keys (`.key`): `600` (owner read/write only) + - Certificates (`.crt`): `644` (world-readable) + +4. **Configuration variables**: + - `MCP_CERT_DAYS`: Certificate validity in days (default: 825) + - `MCP_PLUGIN_CONFIG`: Path to plugin config file (default: `plugins/external/config.yaml`) + +## Configuration Options + +You can configure mTLS using either YAML files or environment variables. + +### Option 1: YAML Configuration + +#### Server Configuration (Plugin) + +In your plugin config file (e.g., `plugins/test.yaml`): + +```yaml +plugins: + - name: "ReplaceBadWordsPlugin" + kind: "plugins.regex_filter.search_replace.SearchReplacePlugin" + # ... plugin config ... + +server_settings: + host: "127.0.0.1" + port: 8000 + tls: + certfile: certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt + keyfile: certs/mcp/plugins/ReplaceBadWordsPlugin/server.key + ca_bundle: certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt + ssl_cert_reqs: 2 # 2 = CERT_REQUIRED (enforce client certificates) +``` + +Start the server (for testing): +```bash +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py +``` + +#### Client Configuration (Gateway) + +In your gateway plugin config file (e.g., `plugins/external/config-client.yaml`): + +```yaml +plugins: + - name: "ReplaceBadWordsPlugin" + kind: "external" + mcp: + proto: STREAMABLEHTTP + url: https://127.0.0.1:8000/mcp + tls: + certfile: certs/mcp/gateway/client.crt + keyfile: certs/mcp/gateway/client.key + ca_bundle: certs/mcp/gateway/ca.crt + verify: true + check_hostname: false +``` + +### Option 2: Environment Variables + +#### Server Environment Variables + +```bash +# Server configuration +export PLUGINS_SERVER_HOST="127.0.0.1" +export PLUGINS_SERVER_PORT="8000" +export PLUGINS_SERVER_SSL_ENABLED="true" + +# TLS/mTLS configuration +export PLUGINS_SERVER_SSL_KEYFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.key" +export PLUGINS_SERVER_SSL_CERTFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt" +export PLUGINS_SERVER_SSL_CA_CERTS="certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt" +export PLUGINS_SERVER_SSL_CERT_REQS="2" # 2 = CERT_REQUIRED +``` + +Start the server (YAML without `server_settings` section for testing): +```bash +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py +``` + +#### Client Environment Variables + +```bash +export PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" +export PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" +export PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" +export PLUGINS_CLIENT_MTLS_VERIFY="true" +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" +``` + +Run your gateway code (YAML without `tls` section in `mcp` config). + +## Environment Variable Reference + +### Server Variables (Plugin) + +| Variable | Description | Example | +|----------|-------------|---------| +| `PLUGINS_SERVER_HOST` | Server bind address | `127.0.0.1` | +| `PLUGINS_SERVER_PORT` | Server bind port | `8000` | +| `PLUGINS_SERVER_SSL_ENABLED` | Enable SSL/TLS | `true` | +| `PLUGINS_SERVER_SSL_KEYFILE` | Path to server private key | `certs/.../server.key` | +| `PLUGINS_SERVER_SSL_CERTFILE` | Path to server certificate | `certs/.../server.crt` | +| `PLUGINS_SERVER_SSL_CA_CERTS` | Path to CA bundle | `certs/.../ca.crt` | +| `PLUGINS_SERVER_SSL_CERT_REQS` | Client cert requirement (0-2) | `2` | +| `PLUGINS_SERVER_SSL_KEYFILE_PASSWORD` | Password for encrypted key | `password` | + +**`ssl_cert_reqs` values:** +- `0` = `CERT_NONE` - No client certificate required +- `1` = `CERT_OPTIONAL` - Client certificate requested but not required +- `2` = `CERT_REQUIRED` - Client certificate required (mTLS) + +### Client Variables (Gateway) + +| Variable | Description | Example | +|----------|-------------|---------| +| `PLUGINS_CLIENT_MTLS_CERTFILE` | Path to client certificate | `certs/.../client.crt` | +| `PLUGINS_CLIENT_MTLS_KEYFILE` | Path to client private key | `certs/.../client.key` | +| `PLUGINS_CLIENT_MTLS_CA_BUNDLE` | Path to CA bundle | `certs/.../ca.crt` | +| `PLUGINS_CLIENT_MTLS_VERIFY` | Verify server certificate | `true` | +| `PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME` | Verify server hostname | `false` | +| `PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD` | Password for encrypted key | `password` | + +## Testing mTLS + +### Test without TLS + +```bash +# Server +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + PLUGINS_SERVER_HOST="127.0.0.1" \ + PLUGINS_SERVER_PORT="8000" \ + PLUGINS_SERVER_SSL_ENABLED="false" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client config should use: url: http://127.0.0.1:8000/mcp +``` + +### Test with mTLS (YAML) + +```bash +# Server (config has server_settings.tls section) +PYTHONPATH=. PLUGINS_CONFIG_PATH="plugins/test.mtls.yaml" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client (config has mcp.tls section) +python3 your_client.py +``` + +### Test with mTLS (Environment Variables) + +```bash +# Server (config has no server_settings section) +# Note: When mTLS is enabled, a health check server automatically starts on port 9000 (port+1000) +PYTHONPATH=. \ + PLUGINS_CONFIG_PATH="plugins/test.yaml" \ + PLUGINS_SERVER_HOST="127.0.0.1" \ + PLUGINS_SERVER_PORT="8000" \ + PLUGINS_SERVER_SSL_ENABLED="true" \ + PLUGINS_SERVER_SSL_KEYFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.key" \ + PLUGINS_SERVER_SSL_CERTFILE="certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt" \ + PLUGINS_SERVER_SSL_CA_CERTS="certs/mcp/plugins/ReplaceBadWordsPlugin/ca.crt" \ + PLUGINS_SERVER_SSL_CERT_REQS="2" \ + python3 mcpgateway/plugins/framework/external/mcp/server/runtime.py & + +# Client (config has no mcp.tls section) +PLUGINS_CLIENT_MTLS_CERTFILE="certs/mcp/gateway/client.crt" \ + PLUGINS_CLIENT_MTLS_KEYFILE="certs/mcp/gateway/client.key" \ + PLUGINS_CLIENT_MTLS_CA_BUNDLE="certs/mcp/gateway/ca.crt" \ + PLUGINS_CLIENT_MTLS_VERIFY="true" \ + PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" \ + python3 your_client.py +``` + +## How mTLS Works + +1. **Certificate Authority (CA)**: A single root CA (`ca.crt`) signs both client and server certificates +2. **Server Certificate**: Plugin server presents its certificate (`server.crt`) to clients +3. **Client Certificate**: Gateway presents its certificate (`client.crt`) to the plugin server +4. **Mutual Verification**: Both parties verify each other's certificates against the CA bundle +5. **Secure Channel**: After mutual authentication, all communication is encrypted + +## Configuration Priority + +Environment variables take precedence over YAML configuration: +- If `PLUGINS_SERVER_SSL_ENABLED=true`, env vars override `server_settings.tls` +- If client env vars are set, they override `mcp.tls` in YAML + +## Hostname Verification (`check_hostname`) + +### Overview +`check_hostname` is a **client-side only** setting that verifies the server's certificate matches the hostname/IP you're connecting to. + +### How It Works +The client checks if the URL hostname matches entries in the server certificate's: +- **Common Name (CN)**: `CN=mcp-plugin-ReplaceBadWordsPlugin` +- **Subject Alternative Names (SANs)**: DNS names or IP addresses + +### Checking Certificate SANs +```bash +# View DNS and IP SANs in server certificate +openssl x509 -in certs/mcp/plugins/ReplaceBadWordsPlugin/server.crt -text -noout | grep -A 5 "Subject Alternative Name" + +# Example output: +# X509v3 Subject Alternative Name: +# DNS:ReplaceBadWordsPlugin, DNS:mcp-plugin-ReplaceBadWordsPlugin, DNS:localhost +``` + +### Configuration Examples + +#### Option 1: Use `localhost` with `check_hostname: true` +```yaml +# Client config +mcp: + url: https://localhost:8000/mcp + tls: + check_hostname: true # Works because "localhost" is in DNS SANs +``` + +Or with environment variables: +```bash +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# Connect to: https://localhost:8000/mcp +``` + +#### Option 2: Use IP address with `check_hostname: false` +```yaml +# Client config +mcp: + url: https://127.0.0.1:8000/mcp + tls: + check_hostname: false # Required because 127.0.0.1 is not in SANs +``` + +Or with environment variables: +```bash +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false" +# Connect to: https://127.0.0.1:8000/mcp +``` + +#### Option 3: Add IP SANs to certificate (Advanced) +If you need `check_hostname: true` with IP addresses, regenerate certificates with IP SANs: + +```bash +# Modify Makefile to add IP SANs when generating certificates +# Add to server.ext or openssl command: +# subjectAltName = DNS:localhost, DNS:plugin-name, IP:127.0.0.1, IP:0.0.0.0 +``` + +### Server-Side Hostname Verification +There is **no** `check_hostname` setting on the server side. The server only: +1. Verifies the client certificate is signed by the trusted CA +2. Checks if `ssl_cert_reqs=2` (CERT_REQUIRED) to enforce client certificates + +### Testing Hostname Verification + +#### Test 1: Valid hostname (should succeed) +```bash +# Server bound to 0.0.0.0 (accepts all interfaces) +PLUGINS_SERVER_HOST="0.0.0.0" ... + +# Client connecting to localhost with hostname check +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# URL: https://localhost:8000/mcp +# Result: ✅ Success (localhost is in DNS SANs) +``` + +#### Test 2: IP address with hostname check (should fail) +```bash +# Client connecting to IP with hostname check +export PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="true" +# URL: https://127.0.0.1:8000/mcp +# Result: ❌ Fails with "IP address mismatch, certificate is not valid for '127.0.0.1'" +``` + +## Troubleshooting + +### Connection Refused +- Ensure server is running: `lsof -i :8000` +- Check server logs for startup errors +- Verify server is bound to correct interface (0.0.0.0 for all, 127.0.0.1 for localhost only) +- Note: When mTLS is enabled, a health check server also runs on port 9000 (port+1000) + +### Certificate Verification Failed +- Verify CA bundle matches on both sides: `md5 certs/**/ca.crt` +- Check certificate paths are correct +- Ensure certificates haven't expired: `openssl x509 -in cert.crt -noout -dates` + +### Hostname Verification Failed +Error: `certificate verify failed: IP address mismatch` or `Hostname mismatch` + +**Solutions:** +1. **Use hostname from SANs**: Connect to `https://localhost:8000` instead of `https://127.0.0.1:8000` +2. **Disable hostname check**: Set `check_hostname: false` or `PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME="false"` +3. **Add IP to SANs**: Regenerate certificates with IP SANs included \ No newline at end of file diff --git a/examples/deployment-configs/cert-manager-issuer-example.yaml b/examples/deployment-configs/cert-manager-issuer-example.yaml new file mode 100644 index 000000000..5b96aae91 --- /dev/null +++ b/examples/deployment-configs/cert-manager-issuer-example.yaml @@ -0,0 +1,58 @@ +# cert-manager CA Issuer Setup (APPLY ONCE) +# This example shows how to set up a self-signed CA using cert-manager +# for issuing mTLS certificates to the MCP Gateway and plugins. +# +# Prerequisites: +# - cert-manager must be installed in your cluster +# Install: kubectl apply -f https://github.com/cert-manager/cert-manager/releases/download/v1.13.0/cert-manager.yaml +# +# Usage: +# 1. Create namespace: kubectl create namespace mcp-gateway-test +# 2. Apply this file ONCE: kubectl apply -f cert-manager-issuer-example.yaml +# 3. Deploy stack with use_cert_manager: true in mcp-stack.yaml +# +# NOTE: This creates long-lived infrastructure (CA + Issuer). +# Do NOT delete this when tearing down your MCP stack deployment. +# The CA certificate will be reused across deployments. +# +--- +# Self-signed Issuer (used to create the CA certificate) +apiVersion: cert-manager.io/v1 +kind: Issuer +metadata: + name: mcp-selfsigned-issuer + namespace: mcp-gateway-test +spec: + selfSigned: {} + +--- +# CA Certificate (root of trust for all mTLS certificates) +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-ca-certificate + namespace: mcp-gateway-test +spec: + isCA: true + commonName: mcp-ca + secretName: mcp-ca-secret + duration: 19800h # 825 days (≈ 2.25 years) + renewBefore: 13200h # Renew at 2/3 of lifetime + privateKey: + algorithm: RSA + size: 4096 + issuerRef: + name: mcp-selfsigned-issuer + kind: Issuer + +--- +# CA Issuer (used to sign gateway and plugin certificates) +# This is what your mcp-stack.yaml references via cert_manager_issuer +apiVersion: cert-manager.io/v1 +kind: Issuer +metadata: + name: mcp-ca-issuer + namespace: mcp-gateway-test +spec: + ca: + secretName: mcp-ca-secret diff --git a/examples/deployment-configs/deploy-compose.mtls.yaml b/examples/deployment-configs/deploy-compose.mtls.yaml new file mode 100644 index 000000000..a33fa1757 --- /dev/null +++ b/examples/deployment-configs/deploy-compose.mtls.yaml @@ -0,0 +1,99 @@ +# MCP Stack - Local Docker Compose Test Configuration +# This config deploys MCP Gateway + external plugins locally with mTLS + +deployment: + type: compose + project_name: mcp-stack-test + +# MCP Gateway configuration +gateway: + # Use local gateway image (build first with: make container-build) + image: mcpgateway/mcpgateway:latest + + port: 4444 + host_port: 4444 # Expose on localhost:4444 + + # Environment configuration + # env_file will auto-detect deploy/env/.env.gateway if not specified + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + + # Enable features + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + + # Auth + AUTH_REQUIRED: "false" # Disabled for easy testing + + # Federation + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true # Verify server certificates (default: true) + mtls_check_hostname: false # Don't verify hostname (default: false for compose) + + # Note: plugins-config.yaml is auto-generated from the plugins section below + # No need to specify config_file anymore! + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Build from GitHub repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + expose_port: true # Expose for testing + + # env_file will auto-detect deploy/env/.env.OPAPluginFilter if not specified + env_vars: + LOG_LEVEL: DEBUG + + # OPA-specific settings + OPA_POLICY_PATH: /app/policies + + # mTLS server configuration + mtls_enabled: true + + # Plugin manager overrides (client-side configuration) + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement for tool and resource filtering" + tags: ["security", "policy", "opa"] + + # LLMGuard Plugin (content filtering) + #- name: LLMGuardPlugin + + # Build from GitHub repository + # repo: https://github.com/terylt/mcp-context-forge.git + # ref: feat/use_mtls_plugins + # context: plugins/external/llmguard + # containerfile: Containerfile + # target: builder # Build only the 'builder' stage (multi-stage build) + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + # port: 8001 + # expose_port: true + + # env_file will auto-detect deploy/env/.env.LLMGuardPlugin if not specified + # env_vars: + # LOG_LEVEL: DEBUG + + # mtls_enabled: true + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-compose.yaml b/examples/deployment-configs/deploy-compose.yaml new file mode 100644 index 000000000..d1c3c89fa --- /dev/null +++ b/examples/deployment-configs/deploy-compose.yaml @@ -0,0 +1,96 @@ +# MCP Stack - Local Docker Compose Test Configuration +# This config deploys MCP Gateway + external plugins locally with mTLS + +deployment: + type: compose + project_name: mcp-stack-test + +# MCP Gateway configuration +gateway: + # Use local gateway image (build first with: make container-build) + image: mcpgateway/mcpgateway:latest + + port: 4444 + host_port: 4444 # Expose on localhost:4444 + + # Environment configuration + # env_file will auto-detect deploy/env/.env.gateway if not specified + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + # Enable features + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + + # Auth + AUTH_REQUIRED: "false" # Disabled for easy testing + + # Federation + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: false + + # Note: plugins-config.yaml is auto-generated from the plugins section below + # No need to specify config_file anymore! + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Build from GitHub repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + expose_port: true # Expose for testing + + # env_file will auto-detect deploy/env/.env.OPAPluginFilter if not specified + env_vars: + LOG_LEVEL: DEBUG + + # OPA-specific settings + OPA_POLICY_PATH: /app/policies + + # mTLS server configuration + mtls_enabled: false + + # Plugin manager overrides (client-side configuration) + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement for tool and resource filtering" + tags: ["security", "policy", "opa"] + + # LLMGuard Plugin (content filtering) + #- name: LLMGuardPlugin + + # # Build from GitHub repository + # repo: https://github.com/terylt/mcp-context-forge.git + # ref: feat/use_mtls_plugins + # context: plugins/external/llmguard + # containerfile: Containerfile + # target: builder # Build only the 'builder' stage (multi-stage build) + + # Defaults: port=8000, host_port auto-assigned (8000, 8001, ...) + # port: 8001 + # expose_port: true + + # env_file will auto-detect deploy/env/.env.LLMGuardPlugin if not specified + # env_vars: + # LOG_LEVEL: DEBUG + + # mtls_enabled: false + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-k8s-cert-manager.yaml b/examples/deployment-configs/deploy-k8s-cert-manager.yaml new file mode 100644 index 000000000..d59c7bc57 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s-cert-manager.yaml @@ -0,0 +1,100 @@ +# MCP Stack - Kubernetes Configuration with cert-manager +# This config uses cert-manager for automatic certificate management +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # ≈ 2.25 years + + # Local paths not used when use_cert_manager=true + # (included for backward compatibility if switching back) + auto_generate: false + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-k8s.mtls.yaml b/examples/deployment-configs/deploy-k8s.mtls.yaml new file mode 100644 index 000000000..32e653406 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s.mtls.yaml @@ -0,0 +1,84 @@ +# MCP Stack - Kubernetes Test Configuration +# Simple test config using pre-built images + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true # Verify server certificates (default: true) + mtls_check_hostname: false # Don't verify hostname (default: false for compose) + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-k8s.yaml b/examples/deployment-configs/deploy-k8s.yaml new file mode 100644 index 000000000..518e61bd4 --- /dev/null +++ b/examples/deployment-configs/deploy-k8s.yaml @@ -0,0 +1,82 @@ +# MCP Stack - Kubernetes Test Configuration +# Simple test config using pre-built images + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Use pre-built gateway image + image: mcpgateway/mcpgateway:latest + image_pull_policy: IfNotPresent + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS disabled for simplicity + mtls_enabled: false + +# External plugins +plugins: + # OPA Plugin Filter + - name: OPAPluginFilter + + # Use pre-built image for faster testing + image: mcpgateway-opapluginfilter:latest + image_pull_policy: IfNotPresent + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: false + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# mTLS Certificate configuration +certificates: + validity_days: 825 + auto_generate: true + ca_path: ./certs/mcp/ca + gateway_path: ./certs/mcp/gateway + plugins_path: ./certs/mcp/plugins diff --git a/examples/deployment-configs/deploy-openshift-local-registry.yaml b/examples/deployment-configs/deploy-openshift-local-registry.yaml new file mode 100644 index 000000000..3a18713a1 --- /dev/null +++ b/examples/deployment-configs/deploy-openshift-local-registry.yaml @@ -0,0 +1,146 @@ +# MCP Stack - OpenShift Local with Registry Push +# Build from source and push to OpenShift internal registry +# +# This example demonstrates how to build images locally and push them to +# OpenShift's internal registry. This is useful for: +# - Testing images in a production-like environment +# - Avoiding ImagePullBackOff errors when deploying to OpenShift +# - Sharing images across multiple namespaces +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. Authenticate to OpenShift internal registry: +# podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +# 4. Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + container_engine: podman + openshift: + create_routes: true + domain: apps-crc.testing # Optional, auto-detected if omitted + tls_termination: edge + +# MCP Gateway configuration +gateway: + # Build gateway from current repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: . + containerfile: Containerfile + image: mcpgateway-gateway:latest + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + + # Container registry configuration + # Build locally, then tag and push to OpenShift internal registry + registry: + enabled: true + # OpenShift internal registry URL (get with: oc registry info) + url: default-route-openshift-image-registry.apps-crc.testing + # Namespace where images will be pushed (must have push permissions) + namespace: mcp-gateway-test + # Push image after build + push: true + # imagePullPolicy for Kubernetes pods + image_pull_policy: Always + +# External plugins +plugins: + # OPA Plugin Filter - build from source and push to registry + - name: OPAPluginFilter + + # Build from repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/use_mtls_plugins + context: plugins/external/opa + containerfile: Containerfile + image: mcpgateway-opapluginfilter:latest + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Container registry configuration + # Push plugin image to same registry as gateway + registry: + enabled: true + url: default-route-openshift-image-registry.apps-crc.testing + namespace: mcp-gateway-test + push: true + image_pull_policy: Always + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# Infrastructure services +infrastructure: + postgres: + enabled: true + image: quay.io/sclorg/postgresql-15-c9s:latest + user: mcpuser # Use non-'postgres' username for Red Hat images + database: mcp + password: mysecretpassword + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # ≈ 2.25 years + + # Local paths not used when use_cert_manager=true + auto_generate: false diff --git a/examples/deployment-configs/deploy-openshift-local.yaml b/examples/deployment-configs/deploy-openshift-local.yaml new file mode 100644 index 000000000..8256478ba --- /dev/null +++ b/examples/deployment-configs/deploy-openshift-local.yaml @@ -0,0 +1,131 @@ +# MCP Stack - OpenShift Local Configuration with cert-manager +# Build from source for local development +# +# Prerequisites: +# 1. Install cert-manager in your cluster +# 2. Apply cert-manager-issuer-example.yaml to create the CA Issuer +# 3. (Optional) Authenticate to OpenShift internal registry: +# podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t) +# 4. Deploy this config + +deployment: + type: kubernetes + namespace: mcp-gateway-test + +# MCP Gateway configuration +gateway: + # Build gateway from current repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: . + containerfile: Containerfile + image: mcpgateway-gateway:latest + + port: 4444 + + # Service configuration + service_type: ClusterIP + service_port: 4444 + + # Resource limits + replicas: 1 + memory_request: 256Mi + memory_limit: 512Mi + cpu_request: 100m + cpu_limit: 500m + + # Environment configuration + env_vars: + LOG_LEVEL: DEBUG + HOST: 0.0.0.0 + PORT: 4444 + MCPGATEWAY_UI_ENABLED: "true" + MCPGATEWAY_ADMIN_API_ENABLED: "true" + MCPGATEWAY_A2A_ENABLED: "true" + AUTH_REQUIRED: "false" + MCPGATEWAY_ENABLE_FEDERATION: "false" + + # mTLS client configuration (gateway connects to plugins) + mtls_enabled: true + mtls_verify: true + mtls_check_hostname: false + + # Container registry configuration (optional) + # Uncomment to push images to OpenShift internal registry + # registry: + # enabled: true + # url: default-route-openshift-image-registry.apps-crc.testing + # namespace: mcp-gateway-test + # push: true + # image_pull_policy: Always + +# External plugins +plugins: + # OPA Plugin Filter - build from source + - name: OPAPluginFilter + + # Build from repository + repo: https://github.com/terylt/mcp-context-forge.git + ref: feat/configurable_plugin_deployment + context: plugins/external/opa + containerfile: Containerfile + image: mcpgateway-opapluginfilter:latest + + port: 8000 + + # Service configuration + service_type: ClusterIP + service_port: 8000 + + # Resource limits + replicas: 1 + memory_request: 128Mi + memory_limit: 256Mi + cpu_request: 50m + cpu_limit: 200m + + env_vars: + LOG_LEVEL: DEBUG + OPA_POLICY_PATH: /app/policies + + mtls_enabled: true + + # Container registry configuration (optional) + # Uncomment to push images to OpenShift internal registry + # registry: + # enabled: true + # url: default-route-openshift-image-registry.apps-crc.testing + # namespace: mcp-gateway-test + # push: true + # image_pull_policy: Always + + # Plugin manager overrides + plugin_overrides: + priority: 10 + mode: "enforce" + description: "OPA policy enforcement" + tags: ["security", "policy", "opa"] + +# Infrastructure services +infrastructure: + postgres: + enabled: true + image: quay.io/sclorg/postgresql-15-c9s:latest + user: mcpuser # Use non-'postgres' username for Red Hat images + database: mcp + password: mysecretpassword + +# cert-manager Certificate configuration +certificates: + # Use cert-manager for automatic certificate management + use_cert_manager: true + + # cert-manager issuer reference (must exist in namespace) + cert_manager_issuer: mcp-ca-issuer + cert_manager_kind: Issuer # or ClusterIssuer + + # Certificate validity (cert-manager will auto-renew at 2/3 of lifetime) + validity_days: 825 # ≈ 2.25 years + + # Local paths not used when use_cert_manager=true + auto_generate: false diff --git a/llms/plugins-llms.md b/llms/plugins-llms.md index 2a1543180..c2a16c353 100644 --- a/llms/plugins-llms.md +++ b/llms/plugins-llms.md @@ -116,9 +116,12 @@ Plugins: How They Work in MCP Context Forge - name: "MyFilter" kind: "external" priority: 10 - mcp: - proto: STREAMABLEHTTP - url: http://localhost:8000/mcp + mcp: + proto: STREAMABLEHTTP + url: http://localhost:8000/mcp + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem ``` - STDIO alternative: ```yaml @@ -129,7 +132,7 @@ Plugins: How They Work in MCP Context Forge proto: STDIO script: path/to/server.py ``` -- Enable framework in gateway: `.env` must set `PLUGINS_ENABLED=true` and optionally `PLUGIN_CONFIG_FILE=plugins/config.yaml`. +- Enable framework in gateway: `.env` must set `PLUGINS_ENABLED=true` and optionally `PLUGIN_CONFIG_FILE=plugins/config.yaml`. To reuse a gateway-wide mTLS client certificate for multiple external plugins, set `PLUGINS_MTLS_CA_BUNDLE`, `PLUGINS_MTLS_CLIENT_CERT`, and related `PLUGINS_MTLS_*` variables. Individual plugin `tls` blocks override these defaults. **Built‑in Plugins (Examples)** - `ArgumentNormalizer` (`plugins/argument_normalizer/argument_normalizer.py`) diff --git a/mcpgateway/plugins/framework/constants.py b/mcpgateway/plugins/framework/constants.py index 7b446624f..155679c57 100644 --- a/mcpgateway/plugins/framework/constants.py +++ b/mcpgateway/plugins/framework/constants.py @@ -32,3 +32,8 @@ TOOL_METADATA = "tool" GATEWAY_METADATA = "gateway" + +# MCP Plugin Server Runtime constants +MCP_SERVER_NAME = "MCP Plugin Server" +MCP_SERVER_INSTRUCTIONS = "External plugin server for MCP Gateway" +GET_PLUGIN_CONFIGS = "get_plugin_configs" diff --git a/mcpgateway/plugins/framework/external/mcp/client.py b/mcpgateway/plugins/framework/external/mcp/client.py index fe68fcd08..d0666eaa2 100644 --- a/mcpgateway/plugins/framework/external/mcp/client.py +++ b/mcpgateway/plugins/framework/external/mcp/client.py @@ -14,9 +14,11 @@ import json import logging import os +import ssl from typing import Any, Optional, Type, TypeVar # Third-Party +import httpx from mcp import ClientSession, StdioServerParameters from mcp.client.stdio import stdio_client from mcp.client.streamable_http import streamablehttp_client @@ -26,8 +28,10 @@ from mcpgateway.plugins.framework.base import Plugin from mcpgateway.plugins.framework.constants import CONTEXT, ERROR, GET_PLUGIN_CONFIG, IGNORE_CONFIG_EXTERNAL, NAME, PAYLOAD, PLUGIN_NAME, PYTHON, PYTHON_SUFFIX, RESULT from mcpgateway.plugins.framework.errors import convert_exception_to_error, PluginError +from mcpgateway.plugins.framework.external.mcp.tls_utils import create_ssl_context from mcpgateway.plugins.framework.models import ( HookType, + MCPClientTLSConfig, PluginConfig, PluginContext, PluginErrorModel, @@ -143,36 +147,73 @@ async def __connect_to_http_server(self, uri: str) -> None: Raises: PluginError: if there is an external connection error after all retries. """ + plugin_tls = self._config.mcp.tls if self._config and self._config.mcp else None + tls_config = plugin_tls or MCPClientTLSConfig.from_env() + + def _tls_httpx_client_factory( + headers: Optional[dict[str, str]] = None, + timeout: Optional[httpx.Timeout] = None, + auth: Optional[httpx.Auth] = None, + ) -> httpx.AsyncClient: + """Build an httpx client with TLS configuration for external MCP servers. + + Args: + headers: Optional HTTP headers to include in requests. + timeout: Optional timeout configuration for HTTP requests. + auth: Optional authentication handler for HTTP requests. + + Returns: + Configured httpx AsyncClient with TLS settings applied. + + Raises: + PluginError: If TLS configuration fails. + """ + + kwargs: dict[str, Any] = {"follow_redirects": True} + if headers: + kwargs["headers"] = headers + kwargs["timeout"] = timeout or httpx.Timeout(30.0) + if auth is not None: + kwargs["auth"] = auth + + if not tls_config: + return httpx.AsyncClient(**kwargs) + + # Create SSL context using the utility function + # This implements certificate validation per test_client_certificate_validation.py + ssl_context = create_ssl_context(tls_config, self.name) + kwargs["verify"] = ssl_context + + return httpx.AsyncClient(**kwargs) + max_retries = 3 base_delay = 1.0 for attempt in range(max_retries): - logger.info(f"Connecting to external plugin server: {uri} (attempt {attempt + 1}/{max_retries})") try: - # Create a fresh exit stack for each attempt + client_factory = _tls_httpx_client_factory if tls_config else None async with AsyncExitStack() as temp_stack: - http_transport = await temp_stack.enter_async_context(streamablehttp_client(uri)) + streamable_client = streamablehttp_client(uri, httpx_client_factory=client_factory) if client_factory else streamablehttp_client(uri) + http_transport = await temp_stack.enter_async_context(streamable_client) http_client, write_func, _ = http_transport session = await temp_stack.enter_async_context(ClientSession(http_client, write_func)) - await session.initialize() - # List available tools response = await session.list_tools() tools = response.tools logger.info("Successfully connected to plugin MCP server with tools: %s", " ".join([tool.name for tool in tools])) - # Success! Now move to the main exit stack - self._http = await self._exit_stack.enter_async_context(streamablehttp_client(uri)) - self._http, self._write, _ = self._http + client_factory = _tls_httpx_client_factory if tls_config else None + streamable_client = streamablehttp_client(uri, httpx_client_factory=client_factory) if client_factory else streamablehttp_client(uri) + http_transport = await self._exit_stack.enter_async_context(streamable_client) + self._http, self._write, _ = http_transport self._session = await self._exit_stack.enter_async_context(ClientSession(self._http, self._write)) + await self._session.initialize() return - except Exception as e: logger.warning(f"Connection attempt {attempt + 1}/{max_retries} failed: {e}") - if attempt == max_retries - 1: # Final attempt failed error_msg = f"External plugin '{self.name}' connection failed after {max_retries} attempts: {uri} is not reachable. Please ensure the MCP server is running." diff --git a/mcpgateway/plugins/framework/external/mcp/server/runtime.py b/mcpgateway/plugins/framework/external/mcp/server/runtime.py index b7bd9664f..34441d202 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/runtime.py +++ b/mcpgateway/plugins/framework/external/mcp/server/runtime.py @@ -1,20 +1,30 @@ +#!/usr/bin/env python3 # -*- coding: utf-8 -*- """Location: ./mcpgateway/plugins/framework/external/mcp/server/runtime.py Copyright 2025 SPDX-License-Identifier: Apache-2.0 -Authors: Fred Araujo +Authors: Fred Araujo, Teryl Taylor -Runtime MCP server for external plugins. +MCP Plugin Runtime using FastMCP with SSL/TLS support. + +This runtime does the following: +- Uses FastMCP from the MCP Python SDK +- Supports both mTLS and non-mTLS configurations +- Reads configuration from PLUGINS_SERVER_* environment variables or uses configurations + the plugin config.yaml +- Implements all plugin hook tools (get_plugin_configs, tool_pre_invoke, etc.) """ # Standard import asyncio import logging +import os +import sys from typing import Any, Dict # Third-Party -from chuk_mcp_runtime.common.mcp_tool_decorator import mcp_tool -from chuk_mcp_runtime.entry import main_async +from mcp.server.fastmcp import FastMCP +import uvicorn # First-Party from mcpgateway.plugins.framework import ( @@ -34,245 +44,446 @@ ToolPreInvokePayload, ToolPreInvokeResult, ) +from mcpgateway.plugins.framework.constants import ( + GET_PLUGIN_CONFIG, + GET_PLUGIN_CONFIGS, + MCP_SERVER_INSTRUCTIONS, + MCP_SERVER_NAME, +) +from mcpgateway.plugins.framework.models import HookType, MCPServerConfig logger = logging.getLogger(__name__) -SERVER = None +SERVER: ExternalPluginServer = None + + +# Module-level tool functions (extracted for testability) -@mcp_tool(name="get_plugin_configs", description="Get the plugin configurations installed on the server") async def get_plugin_configs() -> list[dict]: - """Return a list of plugin configurations for plugins currently installed on the MCP SERVER. + """Get the plugin configurations installed on the server. Returns: - A list of plugin configurations. + JSON string containing list of plugin configuration dictionaries. """ return await SERVER.get_plugin_configs() -@mcp_tool(name="get_plugin_config", description="Get the plugin configuration installed on the server given a plugin name") async def get_plugin_config(name: str) -> dict: - """Return a plugin configuration give a plugin name. + """Get the plugin configuration for a specific plugin. Args: - name: The name of the plugin of which to return the plugin configuration. + name: The name of the plugin Returns: - A list of plugin configurations. + JSON string containing plugin configuration dictionary. """ return await SERVER.get_plugin_config(name) -@mcp_tool(name="prompt_pre_fetch", description="Execute prompt prefetch hook for a plugin") async def prompt_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the prompt pre fetch hook for a particular plugin. + """Execute prompt prefetch hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The prompt name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. - - Raises: - ValueError: If unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The prompt name and arguments to be analyzed + context: Contextual information required for execution Returns: - The transformed or filtered response from the plugin hook. + Result dictionary from the prompt prefetch hook. """ def prompt_pre_fetch_func(plugin: Plugin, payload: PromptPrehookPayload, context: PluginContext) -> PromptPrehookResult: - """Wrapper function for hook. + """Wrapper function to invoke prompt prefetch on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The prompt prehook payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's prompt_pre_fetch method. """ return plugin.prompt_pre_fetch(payload, context) return await SERVER.invoke_hook(PromptPrehookPayload, prompt_pre_fetch_func, plugin_name, payload, context) -@mcp_tool(name="prompt_post_fetch", description="Execute prompt postfetch hook for a plugin") async def prompt_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Call plugin's prompt post-fetch hook. + """Execute prompt postfetch hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The prompt payload to be analyzed. - context: Contextual information about the hook call. - - Raises: - ValueError: if unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The prompt payload to be analyzed + context: Contextual information Returns: - The result of the plugin execution. + Result dictionary from the prompt postfetch hook. """ def prompt_post_fetch_func(plugin: Plugin, payload: PromptPosthookPayload, context: PluginContext) -> PromptPosthookResult: - """Wrapper function for hook. + """Wrapper function to invoke prompt postfetch on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The prompt posthook payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's prompt_post_fetch method. """ return plugin.prompt_post_fetch(payload, context) return await SERVER.invoke_hook(PromptPosthookPayload, prompt_post_fetch_func, plugin_name, payload, context) -@mcp_tool(name="tool_pre_invoke", description="Execute tool pre-invoke hook for a plugin") async def tool_pre_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the tool pre-invoke hook for a particular plugin. + """Execute tool pre-invoke hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The tool name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. - - Raises: - ValueError: If unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The tool name and arguments to be analyzed + context: Contextual information Returns: - The transformed or filtered response from the plugin hook. + Result dictionary from the tool pre-invoke hook. """ def tool_pre_invoke_func(plugin: Plugin, payload: ToolPreInvokePayload, context: PluginContext) -> ToolPreInvokeResult: - """Wrapper function for hook. + """Wrapper function to invoke tool pre-invoke on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The tool pre-invoke payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's tool_pre_invoke method. """ return plugin.tool_pre_invoke(payload, context) return await SERVER.invoke_hook(ToolPreInvokePayload, tool_pre_invoke_func, plugin_name, payload, context) -@mcp_tool(name="tool_post_invoke", description="Execute tool post-invoke hook for a plugin") async def tool_post_invoke(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the tool post-invoke hook for a particular plugin. + """Execute tool post-invoke hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. - - Raises: - ValueError: If unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The tool result to be analyzed + context: Contextual information Returns: - The transformed or filtered response from the plugin hook. + Result dictionary from the tool post-invoke hook. """ def tool_post_invoke_func(plugin: Plugin, payload: ToolPostInvokePayload, context: PluginContext) -> ToolPostInvokeResult: - """Wrapper function for hook. + """Wrapper function to invoke tool post-invoke on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The tool post-invoke payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's tool_post_invoke method. """ return plugin.tool_post_invoke(payload, context) return await SERVER.invoke_hook(ToolPostInvokePayload, tool_post_invoke_func, plugin_name, payload, context) -@mcp_tool(name="resource_pre_fetch", description="Execute resource prefetch hook for a plugin") async def resource_pre_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Invoke the resource pre fetch hook for a particular plugin. + """Execute resource prefetch hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The resource name and arguments to be analyzed. - context: The contextual and state information required for the execution of the hook. - - Raises: - ValueError: If unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The resource name and arguments to be analyzed + context: Contextual information Returns: - The transformed or filtered response from the plugin hook. + Result dictionary from the resource prefetch hook. """ - def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: # pragma: no cover - """Wrapper function for hook. + def resource_pre_fetch_func(plugin: Plugin, payload: ResourcePreFetchPayload, context: PluginContext) -> ResourcePreFetchResult: + """Wrapper function to invoke resource prefetch on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The resource prefetch payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's resource_pre_fetch method. """ return plugin.resource_pre_fetch(payload, context) return await SERVER.invoke_hook(ResourcePreFetchPayload, resource_pre_fetch_func, plugin_name, payload, context) -@mcp_tool(name="resource_post_fetch", description="Execute resource postfetch hook for a plugin") async def resource_post_fetch(plugin_name: str, payload: Dict[str, Any], context: Dict[str, Any]) -> dict: - """Call plugin's resource post-fetch hook. + """Execute resource postfetch hook for a plugin. Args: - plugin_name: The name of the plugin to execute. - payload: The resource payload to be analyzed. - context: Contextual information about the hook call. - - Raises: - ValueError: if unable to retrieve a plugin. + plugin_name: The name of the plugin to execute + payload: The resource payload to be analyzed + context: Contextual information Returns: - The result of the plugin execution. + Result dictionary from the resource postfetch hook. """ - def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: # pragma: no cover - """Wrapper function for hook. + def resource_post_fetch_func(plugin: Plugin, payload: ResourcePostFetchPayload, context: PluginContext) -> ResourcePostFetchResult: + """Wrapper function to invoke resource postfetch on a plugin instance. Args: - plugin: The plugin instance. - payload: The tool name and arguments to be analyzed. - context: the contextual and state information required for the execution of the hook. + plugin: The plugin instance to execute. + payload: The resource postfetch payload. + context: The plugin context. Returns: - The transformed or filtered response from the plugin hook. + Result from the plugin's resource_post_fetch method. """ return plugin.resource_post_fetch(payload, context) return await SERVER.invoke_hook(ResourcePostFetchPayload, resource_post_fetch_func, plugin_name, payload, context) -async def run(): # pragma: no cover - """Run the external plugin SERVER. +class SSLCapableFastMCP(FastMCP): + """FastMCP server with SSL/TLS support using MCPServerConfig.""" + + def __init__(self, server_config: MCPServerConfig, *args, **kwargs): + """Initialize an SSL capable Fast MCP server. + + Args: + server_config: the MCP server configuration including mTLS information. + *args: Additional positional arguments passed to FastMCP. + **kwargs: Additional keyword arguments passed to FastMCP. + """ + # Load server config from environment + + self.server_config = server_config + # Override FastMCP settings with our server config + if "host" not in kwargs: + kwargs["host"] = self.server_config.host + if "port" not in kwargs: + kwargs["port"] = self.server_config.port + + super().__init__(*args, **kwargs) + + def _get_ssl_config(self) -> dict: + """Build SSL configuration for uvicorn from MCPServerConfig. + + Returns: + Dictionary of SSL configuration parameters for uvicorn. + """ + ssl_config = {} + + if self.server_config.tls: + tls = self.server_config.tls + if tls.keyfile and tls.certfile: + ssl_config["ssl_keyfile"] = tls.keyfile + ssl_config["ssl_certfile"] = tls.certfile + + if tls.ca_bundle: + ssl_config["ssl_ca_certs"] = tls.ca_bundle + + ssl_config["ssl_cert_reqs"] = tls.ssl_cert_reqs + + if tls.keyfile_password: + ssl_config["ssl_keyfile_password"] = tls.keyfile_password + + logger.info("SSL/TLS enabled (mTLS)") + logger.info(f" Key: {ssl_config['ssl_keyfile']}") + logger.info(f" Cert: {ssl_config['ssl_certfile']}") + if "ssl_ca_certs" in ssl_config: + logger.info(f" CA: {ssl_config['ssl_ca_certs']}") + logger.info(f" Client cert required: {ssl_config['ssl_cert_reqs'] == 2}") + else: + logger.warning("TLS config present but keyfile/certfile not configured") + else: + logger.info("SSL/TLS not enabled") + + return ssl_config + + async def _start_health_check_server(self, health_port: int) -> None: + """Start a simple HTTP-only health check server on a separate port. + + This allows health checks to work even when the main server uses HTTPS/mTLS. + + Args: + health_port: Port number for the health check server. + """ + # Third-Party + from starlette.applications import Starlette + from starlette.requests import Request + from starlette.responses import JSONResponse + from starlette.routing import Route + + async def health_check(request: Request): + """Health check endpoint for container orchestration. + + Args: + request: the http request from which the health check occurs. + + Returns: + JSON response with health status. + """ + return JSONResponse({"status": "healthy"}) + + # Create a minimal Starlette app with only the health endpoint + health_app = Starlette(routes=[Route("/health", health_check, methods=["GET"])]) + + logger.info(f"Starting HTTP health check server on {self.settings.host}:{health_port}") + config = uvicorn.Config( + app=health_app, + host=self.settings.host, + port=health_port, + log_level="warning", # Reduce noise from health checks + ) + server = uvicorn.Server(config) + await server.serve() + + async def run_streamable_http_async(self) -> None: + """Run the server using StreamableHTTP transport with optional SSL/TLS.""" + starlette_app = self.streamable_http_app() + + # Add health check endpoint to main app + # Third-Party + from starlette.requests import Request + from starlette.responses import JSONResponse + from starlette.routing import Route + + async def health_check(request: Request): + """Health check endpoint for container orchestration. + + Args: + request: the http request from which the health check occurs. + + Returns: + JSON response with health status. + """ + return JSONResponse({"status": "healthy"}) + + # Add the health route to the Starlette app + starlette_app.routes.append(Route("/health", health_check, methods=["GET"])) + + # Build uvicorn config with optional SSL + ssl_config = self._get_ssl_config() + config_kwargs = { + "app": starlette_app, + "host": self.settings.host, + "port": self.settings.port, + "log_level": self.settings.log_level.lower(), + } + config_kwargs.update(ssl_config) + + logger.info(f"Starting plugin server on {self.settings.host}:{self.settings.port}") + config = uvicorn.Config(**config_kwargs) + server = uvicorn.Server(config) + + # If SSL is enabled, start a separate HTTP health check server + if ssl_config: + health_port = self.settings.port + 1000 # Use port+1000 for health checks + logger.info(f"SSL enabled - starting separate HTTP health check on port {health_port}") + # Run both servers concurrently + await asyncio.gather(server.serve(), self._start_health_check_server(health_port)) + else: + # Just run the main server (health check is already on it) + await server.serve() + + +async def run(): + """Run the external plugin server with FastMCP. + + Supports both stdio and HTTP transports. Auto-detects transport based on stdin + (if stdin is not a TTY, uses stdio mode), or you can explicitly set PLUGINS_TRANSPORT. + + Reads configuration from PLUGINS_SERVER_* environment variables: + - PLUGINS_TRANSPORT: Transport type - 'stdio' or 'http' (default: auto-detect) + - PLUGINS_SERVER_HOST: Server host (default: 0.0.0.0) - HTTP mode only + - PLUGINS_SERVER_PORT: Server port (default: 8000) - HTTP mode only + - PLUGINS_SERVER_SSL_ENABLED: Enable SSL/TLS (true/false) - HTTP mode only + - PLUGINS_SERVER_SSL_KEYFILE: Path to server private key - HTTP mode only + - PLUGINS_SERVER_SSL_CERTFILE: Path to server certificate - HTTP mode only + - PLUGINS_SERVER_SSL_CA_CERTS: Path to CA bundle for client verification - HTTP mode only + - PLUGINS_SERVER_SSL_CERT_REQS: Client cert requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED) - HTTP mode only Raises: - Exception: if unnable to run the plugin SERVER. + Exception: If plugin server initialization or execution fails. """ - global SERVER # pylint: disable=global-statement + global SERVER + + # Initialize plugin server SERVER = ExternalPluginServer() - if await SERVER.initialize(): - try: - await main_async() - except Exception: - logger.exception("Caught error while executing plugin server") - raise - finally: - await SERVER.shutdown() - - -if __name__ == "__main__": # pragma: no cover - # launch + + if not await SERVER.initialize(): + logger.error("Failed to initialize plugin server") + return + + # Determine transport type from environment variable or auto-detect + # Auto-detect: if stdin is not a TTY (i.e., it's being piped), use stdio mode + transport = os.environ.get("PLUGINS_TRANSPORT", None) + if transport is None: + # Auto-detect based on stdin + if not sys.stdin.isatty(): + transport = "stdio" + logger.info("Auto-detected stdio transport (stdin is not a TTY)") + else: + transport = "http" + else: + transport = transport.lower() + + try: + if transport == "stdio": + # Create basic FastMCP server for stdio (no SSL support needed for stdio) + mcp = FastMCP( + name=MCP_SERVER_NAME, + instructions=MCP_SERVER_INSTRUCTIONS, + ) + + # Register module-level tool functions with FastMCP + mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) + mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) + mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) + mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) + mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) + mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) + mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) + mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) + + # Run with stdio transport + logger.info("Starting MCP plugin server with FastMCP (stdio transport)") + await mcp.run_stdio_async() + + else: # http or streamablehttp + # Create FastMCP server with SSL support + mcp = SSLCapableFastMCP( + server_config=SERVER.get_server_config(), + name=MCP_SERVER_NAME, + instructions=MCP_SERVER_INSTRUCTIONS, + ) + + # Register module-level tool functions with FastMCP + mcp.tool(name=GET_PLUGIN_CONFIGS)(get_plugin_configs) + mcp.tool(name=GET_PLUGIN_CONFIG)(get_plugin_config) + mcp.tool(name=HookType.PROMPT_PRE_FETCH.value)(prompt_pre_fetch) + mcp.tool(name=HookType.PROMPT_POST_FETCH.value)(prompt_post_fetch) + mcp.tool(name=HookType.TOOL_PRE_INVOKE.value)(tool_pre_invoke) + mcp.tool(name=HookType.TOOL_POST_INVOKE.value)(tool_post_invoke) + mcp.tool(name=HookType.RESOURCE_PRE_FETCH.value)(resource_pre_fetch) + mcp.tool(name=HookType.RESOURCE_POST_FETCH.value)(resource_post_fetch) + + # Run with streamable-http transport + logger.info("Starting MCP plugin server with FastMCP (HTTP transport)") + await mcp.run_streamable_http_async() + + except Exception: + logger.exception("Caught error while executing plugin server") + raise + finally: + await SERVER.shutdown() + + +if __name__ == "__main__": asyncio.run(run()) diff --git a/mcpgateway/plugins/framework/external/mcp/server/server.py b/mcpgateway/plugins/framework/external/mcp/server/server.py index c2d340e42..3772d03c1 100644 --- a/mcpgateway/plugins/framework/external/mcp/server/server.py +++ b/mcpgateway/plugins/framework/external/mcp/server/server.py @@ -26,6 +26,7 @@ from mcpgateway.plugins.framework.loader.config import ConfigLoader from mcpgateway.plugins.framework.manager import DEFAULT_PLUGIN_TIMEOUT, PluginManager from mcpgateway.plugins.framework.models import ( + MCPServerConfig, PluginContext, PluginErrorModel, PluginResult, @@ -165,6 +166,14 @@ async def initialize(self) -> bool: return self._plugin_manager.initialized async def shutdown(self) -> None: - """Shutdow the plugin server.""" + """Shutdown the plugin server.""" if self._plugin_manager.initialized: await self._plugin_manager.shutdown() + + def get_server_config(self) -> MCPServerConfig: + """Return the configuration for the plugin server. + + Returns: + A server configuration including host, port, and TLS information. + """ + return self._config.server_settings or MCPServerConfig.from_env() or MCPServerConfig() diff --git a/mcpgateway/plugins/framework/external/mcp/tls_utils.py b/mcpgateway/plugins/framework/external/mcp/tls_utils.py new file mode 100644 index 000000000..8e6fe195a --- /dev/null +++ b/mcpgateway/plugins/framework/external/mcp/tls_utils.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +"""Location: ./mcpgateway/plugins/framework/external/mcp/tls_utils.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +TLS/SSL utility functions for external MCP plugin connections. + +This module provides utilities for creating and configuring SSL contexts for +secure communication with external MCP plugin servers. It implements the +certificate validation logic that is tested in test_client_certificate_validation.py. +""" + +# Standard +import logging +import ssl + +# First-Party +from mcpgateway.plugins.framework.errors import PluginError +from mcpgateway.plugins.framework.models import MCPClientTLSConfig, PluginErrorModel + +logger = logging.getLogger(__name__) + + +def create_ssl_context(tls_config: MCPClientTLSConfig, plugin_name: str) -> ssl.SSLContext: + """Create and configure an SSL context for external plugin connections. + + This function implements the SSL/TLS security configuration for connecting to + external MCP plugin servers. It supports both standard TLS and mutual TLS (mTLS) + authentication. + + Security Features Implemented (per Python ssl docs and OpenSSL): + + 1. **Invalid Certificate Rejection**: ssl.create_default_context() with CERT_REQUIRED + automatically validates certificate signatures and chains via OpenSSL. + + 2. **Expired Certificate Handling**: OpenSSL automatically checks notBefore and + notAfter fields per RFC 5280 Section 6. Expired or not-yet-valid certificates + are rejected during the handshake. + + 3. **Certificate Chain Validation**: Full chain validation up to a trusted CA. + Each certificate in the chain is verified for validity period, signature, etc. + + 4. **Hostname Verification**: When check_hostname is enabled, the certificate's + Subject Alternative Name (SAN) or Common Name (CN) must match the hostname. + + 5. **MITM Prevention**: Via mutual authentication when client certificates are + provided (mTLS mode). + + Args: + tls_config: TLS configuration containing CA bundle, client certs, and verification settings + plugin_name: Name of the plugin (for error messages) + + Returns: + Configured SSLContext ready for use with httpx or other SSL connections + + Raises: + PluginError: If SSL context configuration fails + + Example: + >>> tls_config = MCPClientTLSConfig( # doctest: +SKIP + ... ca_bundle="/path/to/ca.crt", + ... certfile="/path/to/client.crt", + ... keyfile="/path/to/client.key", + ... verify=True, + ... check_hostname=True + ... ) + >>> ssl_context = create_ssl_context(tls_config, "MyPlugin") # doctest: +SKIP + >>> # Use ssl_context with httpx or other SSL connections + """ + try: + # Create SSL context with secure defaults + # Per Python docs: "The settings are chosen by the ssl module, and usually + # represent a higher security level than when calling the SSLContext + # constructor directly." + # This sets verify_mode to CERT_REQUIRED by default, which enables: + # - Certificate signature validation + # - Certificate chain validation up to trusted CA + # - Automatic expiration checking (notBefore/notAfter per RFC 5280) + ssl_context = ssl.create_default_context() + + if not tls_config.verify: + # Disable certificate verification (not recommended for production) + logger.warning(f"Certificate verification disabled for plugin '{plugin_name}'. " "This is not recommended for production use.") + ssl_context.check_hostname = False + ssl_context.verify_mode = ssl.CERT_NONE # noqa: DUO122 + else: + # Enable strict certificate verification (production mode) + # Load CA certificate bundle for server certificate validation + if tls_config.ca_bundle: + # This CA bundle will be used to validate the server's certificate + # OpenSSL will check: + # - Certificate is signed by a trusted CA in this bundle + # - Certificate hasn't expired (notAfter > now) + # - Certificate is already valid (notBefore < now) + # - Certificate chain is complete and valid + ssl_context.load_verify_locations(cafile=tls_config.ca_bundle) + + # Hostname verification + # When enabled, certificate's SAN or CN must match the server hostname + if not tls_config.check_hostname: + logger.warning(f"Hostname verification disabled for plugin '{plugin_name}'. " "This increases risk of MITM attacks.") + ssl_context.check_hostname = False + + # Load client certificate for mTLS (mutual authentication) + # If provided, the client will authenticate itself to the server + if tls_config.certfile: + ssl_context.load_cert_chain( + certfile=tls_config.certfile, + keyfile=tls_config.keyfile, + password=tls_config.keyfile_password, + ) + logger.debug(f"mTLS enabled for plugin '{plugin_name}' with client certificate: {tls_config.certfile}") + + # Log security configuration + logger.debug( + f"SSL context created for plugin '{plugin_name}': " + f"verify_mode={ssl_context.verify_mode}, " + f"check_hostname={ssl_context.check_hostname}, " + f"minimum_version={ssl_context.minimum_version}" + ) + + return ssl_context + + except Exception as exc: + error_msg = f"Failed to configure SSL context for plugin '{plugin_name}': {exc}" + logger.error(error_msg) + raise PluginError(error=PluginErrorModel(message=error_msg, plugin_name=plugin_name)) from exc diff --git a/mcpgateway/plugins/framework/manager.py b/mcpgateway/plugins/framework/manager.py index 374d727c4..20005ab50 100644 --- a/mcpgateway/plugins/framework/manager.py +++ b/mcpgateway/plugins/framework/manager.py @@ -614,13 +614,7 @@ async def initialize(self) -> None: for plugin_config in plugins: try: # For disabled plugins, create a stub plugin without full instantiation - if plugin_config.mode == PluginMode.DISABLED: - # Create a minimal stub plugin for display purposes only - stub_plugin = Plugin(plugin_config) - self._registry.register(stub_plugin) - loaded_count += 1 - logger.info(f"Registered disabled plugin: {plugin_config.name} (display only, not instantiated)") - else: + if plugin_config.mode != PluginMode.DISABLED: # Fully instantiate enabled plugins plugin = await self._loader.load_and_instantiate_plugin(plugin_config) if plugin: @@ -629,6 +623,9 @@ async def initialize(self) -> None: logger.info(f"Loaded plugin: {plugin_config.name} (mode: {plugin_config.mode})") else: raise ValueError(f"Unable to instantiate plugin: {plugin_config.name}") + else: + logger.info(f"Plugin: {plugin_config.name} is disabled. Ignoring.") + except Exception as e: # Clean error message without stack trace spam logger.error(f"Failed to load plugin '{plugin_config.name}': {str(e)}") diff --git a/mcpgateway/plugins/framework/models.py b/mcpgateway/plugins/framework/models.py index 85950b1ce..7906bee20 100644 --- a/mcpgateway/plugins/framework/models.py +++ b/mcpgateway/plugins/framework/models.py @@ -11,6 +11,7 @@ # Standard from enum import Enum +import os from pathlib import Path from typing import Any, Generic, Optional, Self, TypeVar @@ -246,18 +247,257 @@ class AppliedTo(BaseModel): resources: Optional[list[ResourceTemplate]] = None -class MCPConfig(BaseModel): - """An MCP configuration for external MCP plugin objects. +class MCPTransportTLSConfigBase(BaseModel): + """Base TLS configuration with common fields for both client and server. Attributes: - type (TransportType): The MCP transport type. Can be SSE, STDIO, or STREAMABLEHTTP + certfile (Optional[str]): Path to the PEM-encoded certificate file. + keyfile (Optional[str]): Path to the PEM-encoded private key file. + ca_bundle (Optional[str]): Path to a CA bundle file for verification. + keyfile_password (Optional[str]): Optional password for encrypted private key. + """ + + certfile: Optional[str] = Field(default=None, description="Path to PEM certificate file") + keyfile: Optional[str] = Field(default=None, description="Path to PEM private key file") + ca_bundle: Optional[str] = Field(default=None, description="Path to CA bundle for verification") + keyfile_password: Optional[str] = Field(default=None, description="Password for encrypted private key") + + @field_validator("ca_bundle", "certfile", "keyfile", mode=AFTER) + @classmethod + def validate_path(cls, value: Optional[str]) -> Optional[str]: + """Expand and validate file paths supplied in TLS configuration. + + Args: + value: File path to validate. + + Returns: + Expanded file path or None if not provided. + + Raises: + ValueError: If file path does not exist. + """ + + if not value: + return value + expanded = Path(value).expanduser() + if not expanded.is_file(): + raise ValueError(f"TLS file path does not exist: {value}") + return str(expanded) + + @model_validator(mode=AFTER) + def validate_cert_key(self) -> Self: # pylint: disable=bad-classmethod-argument + """Ensure certificate and key options are consistent. + + Returns: + Self after validation. + + Raises: + ValueError: If keyfile is specified without certfile. + """ + + if self.keyfile and not self.certfile: + raise ValueError("keyfile requires certfile to be specified") + return self + + @staticmethod + def _parse_bool(value: Optional[str]) -> Optional[bool]: + """Convert a string environment value to boolean. + + Args: + value: String value to parse as boolean. + + Returns: + Boolean value or None if value is None. + + Raises: + ValueError: If value is not a valid boolean string. + """ + + if value is None: + return None + normalized = value.strip().lower() + if normalized in {"1", "true", "yes", "on"}: + return True + if normalized in {"0", "false", "no", "off"}: + return False + raise ValueError(f"Invalid boolean value: {value}") + + +class MCPClientTLSConfig(MCPTransportTLSConfigBase): + """Client-side TLS configuration (gateway connecting to plugin). + + Attributes: + verify (bool): Whether to verify the remote server certificate. + check_hostname (bool): Enable hostname verification when verify is true. + """ + + verify: bool = Field(default=True, description="Verify the upstream server certificate") + check_hostname: bool = Field(default=True, description="Enable hostname verification") + + @classmethod + def from_env(cls) -> Optional["MCPClientTLSConfig"]: + """Construct client TLS configuration from PLUGINS_CLIENT_* environment variables. + + Returns: + MCPClientTLSConfig instance or None if no environment variables are set. + """ + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_CLIENT_MTLS_CERTFILE"): + data["certfile"] = env["PLUGINS_CLIENT_MTLS_CERTFILE"] + if env.get("PLUGINS_CLIENT_MTLS_KEYFILE"): + data["keyfile"] = env["PLUGINS_CLIENT_MTLS_KEYFILE"] + if env.get("PLUGINS_CLIENT_MTLS_CA_BUNDLE"): + data["ca_bundle"] = env["PLUGINS_CLIENT_MTLS_CA_BUNDLE"] + if env.get("PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD") is not None: + data["keyfile_password"] = env["PLUGINS_CLIENT_MTLS_KEYFILE_PASSWORD"] + + verify_val = cls._parse_bool(env.get("PLUGINS_CLIENT_MTLS_VERIFY")) + if verify_val is not None: + data["verify"] = verify_val + + check_hostname_val = cls._parse_bool(env.get("PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME")) + if check_hostname_val is not None: + data["check_hostname"] = check_hostname_val + + if not data: + return None + + return cls(**data) + + +class MCPServerTLSConfig(MCPTransportTLSConfigBase): + """Server-side TLS configuration (plugin accepting gateway connections). + + Attributes: + ssl_cert_reqs (int): Client certificate requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED). + """ + + ssl_cert_reqs: int = Field(default=2, description="Client certificate requirement (0=NONE, 1=OPTIONAL, 2=REQUIRED)") + + @classmethod + def from_env(cls) -> Optional["MCPServerTLSConfig"]: + """Construct server TLS configuration from PLUGINS_SERVER_SSL_* environment variables. + + Returns: + MCPServerTLSConfig instance or None if no environment variables are set. + + Raises: + ValueError: If PLUGINS_SERVER_SSL_CERT_REQS is not a valid integer. + """ + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_SERVER_SSL_KEYFILE"): + data["keyfile"] = env["PLUGINS_SERVER_SSL_KEYFILE"] + if env.get("PLUGINS_SERVER_SSL_CERTFILE"): + data["certfile"] = env["PLUGINS_SERVER_SSL_CERTFILE"] + if env.get("PLUGINS_SERVER_SSL_CA_CERTS"): + data["ca_bundle"] = env["PLUGINS_SERVER_SSL_CA_CERTS"] + if env.get("PLUGINS_SERVER_SSL_KEYFILE_PASSWORD") is not None: + data["keyfile_password"] = env["PLUGINS_SERVER_SSL_KEYFILE_PASSWORD"] + + if env.get("PLUGINS_SERVER_SSL_CERT_REQS"): + try: + data["ssl_cert_reqs"] = int(env["PLUGINS_SERVER_SSL_CERT_REQS"]) + except ValueError: + raise ValueError(f"Invalid PLUGINS_SERVER_SSL_CERT_REQS: {env['PLUGINS_SERVER_SSL_CERT_REQS']}") + + if not data: + return None + + return cls(**data) + + +class MCPServerConfig(BaseModel): + """Server-side MCP configuration (plugin running as server). + + Attributes: + host (str): Server host to bind to. + port (int): Server port to bind to. + tls (Optional[MCPServerTLSConfig]): Server-side TLS configuration. + """ + + host: str = Field(default="0.0.0.0", description="Server host to bind to") + port: int = Field(default=8000, description="Server port to bind to") + tls: Optional[MCPServerTLSConfig] = Field(default=None, description="Server-side TLS configuration") + + @staticmethod + def _parse_bool(value: Optional[str]) -> Optional[bool]: + """Convert a string environment value to boolean. + + Args: + value: String value to parse as boolean. + + Returns: + Boolean value or None if value is None. + + Raises: + ValueError: If value is not a valid boolean string. + """ + + if value is None: + return None + normalized = value.strip().lower() + if normalized in {"1", "true", "yes", "on"}: + return True + if normalized in {"0", "false", "no", "off"}: + return False + raise ValueError(f"Invalid boolean value: {value}") + + @classmethod + def from_env(cls) -> Optional["MCPServerConfig"]: + """Construct server configuration from PLUGINS_SERVER_* environment variables. + + Returns: + MCPServerConfig instance or None if no environment variables are set. + + Raises: + ValueError: If PLUGINS_SERVER_PORT is not a valid integer. + """ + + env = os.environ + data: dict[str, Any] = {} + + if env.get("PLUGINS_SERVER_HOST"): + data["host"] = env["PLUGINS_SERVER_HOST"] + if env.get("PLUGINS_SERVER_PORT"): + try: + data["port"] = int(env["PLUGINS_SERVER_PORT"]) + except ValueError: + raise ValueError(f"Invalid PLUGINS_SERVER_PORT: {env['PLUGINS_SERVER_PORT']}") + + # Check if SSL/TLS is enabled + ssl_enabled = cls._parse_bool(env.get("PLUGINS_SERVER_SSL_ENABLED")) + if ssl_enabled: + # Load TLS configuration + tls_config = MCPServerTLSConfig.from_env() + if tls_config: + data["tls"] = tls_config + + if not data: + return None + + return cls(**data) + + +class MCPClientConfig(BaseModel): + """Client-side MCP configuration (gateway connecting to external plugin). + + Attributes: + proto (TransportType): The MCP transport type. Can be SSE, STDIO, or STREAMABLEHTTP url (Optional[str]): An MCP URL. Only valid when MCP transport type is SSE or STREAMABLEHTTP. script (Optional[str]): The path and name to the STDIO script that runs the plugin server. Only valid for STDIO type. + tls (Optional[MCPClientTLSConfig]): Client-side TLS configuration for mTLS. """ proto: TransportType url: Optional[str] = None script: Optional[str] = None + tls: Optional[MCPClientTLSConfig] = None @field_validator(URL, mode=AFTER) @classmethod @@ -302,6 +542,21 @@ def validate_script(cls, script: str | None) -> str | None: raise ValueError(f"MCP server script {script} must have a .py or .sh suffix.") return script + @model_validator(mode=AFTER) + def validate_tls_usage(self) -> Self: # pylint: disable=bad-classmethod-argument + """Ensure TLS configuration is only used with HTTP-based transports. + + Returns: + Self after validation. + + Raises: + ValueError: If TLS configuration is used with non-HTTP transports. + """ + + if self.tls and self.proto not in (TransportType.SSE, TransportType.STREAMABLEHTTP): + raise ValueError("TLS configuration is only valid for HTTP/SSE transports") + return self + class PluginConfig(BaseModel): """A plugin configuration. @@ -320,7 +575,7 @@ class PluginConfig(BaseModel): conditions (Optional[list[PluginCondition]]): the conditions on which the plugin is run. applied_to (Optional[list[AppliedTo]]): the tools, fields, that the plugin is applied to. config (dict[str, Any]): the plugin specific configurations. - mcp (Optional[MCPConfig]): MCP configuration for external plugin when kind is "external". + mcp (Optional[MCPClientConfig]): Client-side MCP configuration (gateway connecting to plugin). """ name: str @@ -336,7 +591,7 @@ class PluginConfig(BaseModel): conditions: Optional[list[PluginCondition]] = None # When to apply applied_to: Optional[AppliedTo] = None # Fields to apply to. config: Optional[dict[str, Any]] = None - mcp: Optional[MCPConfig] = None + mcp: Optional[MCPClientConfig] = None @model_validator(mode=AFTER) def check_url_or_script_filled(self) -> Self: # pylint: disable=bad-classmethod-argument @@ -498,14 +753,16 @@ class Config(BaseModel): """Configurations for plugins. Attributes: - plugins: the list of plugins to enable. - plugin_dirs: The directories in which to look for plugins. - plugin_settings: global settings for plugins. + plugins (Optional[list[PluginConfig]]): the list of plugins to enable. + plugin_dirs (list[str]): The directories in which to look for plugins. + plugin_settings (PluginSettings): global settings for plugins. + server_settings (Optional[MCPServerConfig]): Server-side MCP configuration (when plugins run as server). """ plugins: Optional[list[PluginConfig]] = [] plugin_dirs: list[str] = [] plugin_settings: PluginSettings + server_settings: Optional[MCPServerConfig] = None class PromptPrehookPayload(BaseModel): diff --git a/mcpgateway/tools/builder/__init__.py b/mcpgateway/tools/builder/__init__.py new file mode 100644 index 000000000..ec309d8bd --- /dev/null +++ b/mcpgateway/tools/builder/__init__.py @@ -0,0 +1,8 @@ +# -*- coding: utf-8 -*- +"""Location: ./mcpgateway/tools/builder/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Builder Package. +""" diff --git a/mcpgateway/tools/builder/cli.py b/mcpgateway/tools/builder/cli.py new file mode 100644 index 000000000..19d38f029 --- /dev/null +++ b/mcpgateway/tools/builder/cli.py @@ -0,0 +1,296 @@ +""" +Location: ./mcpgateway/tools/builder/cli.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +MCP Stack Deployment Tool - Hybrid Dagger/Python Implementation + +This script can run in two modes: +1. Plain Python mode (default) - No external dependencies +2. Dagger mode (opt-in) - Requires dagger-io package, auto-downloads CLI + +Usage: + # Local execution (plain Python mode) + cforge deploy deploy.yaml + + # Use Dagger mode for optimization (requires dagger-io, auto-downloads CLI) + cforge --dagger deploy deploy.yaml + + # Inside container + docker run -v $PWD:/workspace mcpgateway/mcp-builder:latest deploy deploy.yaml + +Features: + - Validates deploy.yaml configuration + - Builds plugin containers from git repos + - Generates mTLS certificates + - Deploys to Kubernetes or Docker Compose + - Integrates with CI/CD vault secrets +""" + +# Standard +import asyncio +import os +from pathlib import Path +import sys +from typing import Optional + +# Third-Party +from rich.console import Console +from rich.panel import Panel +import typer +from typing_extensions import Annotated + +# First-Party +from mcpgateway.tools.builder.factory import DeployFactory + +app = typer.Typer( + help="Command line tools for deploying the gateway and plugins via a config file.", +) + +console = Console() + +deployer = None + +IN_CONTAINER = os.path.exists("/.dockerenv") or os.environ.get("CONTAINER") == "true" +BUILDER_DIR = Path(__file__).parent / "builder" +IMPL_MODE = "plain" + + +@app.callback() +def cli( + ctx: typer.Context, + dagger: Annotated[bool, typer.Option("--dagger", help="Use Dagger mode (requires dagger-io package)")] = False, + verbose: Annotated[bool, typer.Option("--verbose", "-v", help="Verbose output")] = False, +): + """MCP Stack deployment tool + + Deploys MCP Gateway + external plugins from a single YAML configuration. + + By default, uses plain Python mode. Use --dagger to enable Dagger optimization. + + Args: + ctx: Typer context object + dagger: Enable Dagger mode (requires dagger-io package and auto-downloads CLI) + verbose: Enable verbose output + """ + ctx.ensure_object(dict) + ctx.obj["verbose"] = verbose + ctx.obj["dagger"] = dagger + + if ctx.invoked_subcommand != "version": + # Show execution mode - default to Python, opt-in to Dagger + mode = "dagger" if dagger else "python" + ctx.obj["deployer"], ctx.obj["mode"] = DeployFactory.create_deployer(mode, verbose) + mode_color = "green" if ctx.obj["mode"] == "dagger" else "yellow" + env_text = "container" if IN_CONTAINER else "local" + + if verbose: + console.print(Panel(f"[bold]Mode:[/bold] [{mode_color}]{ctx.obj['mode']}[/{mode_color}]\n" f"[bold]Environment:[/bold] {env_text}\n", title="MCP Deploy", border_style=mode_color)) + + +@app.command() +def validate(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file.")]): + """Validate mcp-stack.yaml configuration + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + """ + impl = ctx.obj["deployer"] + + try: + impl.validate(config_file) + console.print("[green]✓ Configuration valid[/green]") + except Exception as e: + console.print(f"[red]✗ Validation failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def build( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + plugins_only: Annotated[bool, typer.Option("--plugins-only", help="Only build plugin containers")] = False, + plugin: Annotated[Optional[list[str]], typer.Option("--plugin", "-p", help="Build specific plugin(s)")] = None, + no_cache: Annotated[bool, typer.Option("--no-cache", help="Disable build cache")] = False, + copy_env_templates: Annotated[bool, typer.Option("--copy-env-templates", help="Copy .env.template files from plugin repos")] = True, +): + """Build containers + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + plugins_only: Only build plugin containers, skip gateway + plugin: List of specific plugin names to build + no_cache: Disable build cache + copy_env_templates: Copy .env.template files from plugin repos + """ + impl = ctx.obj["deployer"] + + try: + asyncio.run(impl.build(config_file, plugins_only=plugins_only, specific_plugins=list(plugin) if plugin else None, no_cache=no_cache, copy_env_templates=copy_env_templates)) + console.print("[green]✓ Build complete[/green]") + + if copy_env_templates: + console.print("[yellow]⚠ IMPORTANT: Review .env files in deploy/env/ before deploying![/yellow]") + console.print("[yellow] Update any required configuration values.[/yellow]") + except Exception as e: + console.print(f"[red]✗ Build failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def certs(ctx: typer.Context, config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")]): + """Generate mTLS certificates + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + """ + impl = ctx.obj["deployer"] + + try: + asyncio.run(impl.generate_certificates(config_file)) + console.print("[green]✓ Certificates generated[/green]") + except Exception as e: + console.print(f"[red]✗ Certificate generation failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def deploy( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + output_dir: Annotated[Optional[Path], typer.Option("--output-dir", "-o", help="The deployment configuration file")] = None, + dry_run: Annotated[bool, typer.Option("--dry-run", help="Generate manifests without deploying")] = False, + skip_build: Annotated[bool, typer.Option("--skip-build", help="Skip building containers")] = False, + skip_certs: Annotated[bool, typer.Option("--skip-certs", help="Skip certificate generation")] = False, +): + """Deploy MCP stack + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + output_dir: Custom output directory for manifests + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + """ + impl = ctx.obj["deployer"] + + try: + asyncio.run(impl.deploy(config_file, dry_run=dry_run, skip_build=skip_build, skip_certs=skip_certs, output_dir=output_dir)) + if dry_run: + console.print("[yellow]✓ Dry-run complete (no changes made)[/yellow]") + else: + console.print("[green]✓ Deployment complete[/green]") + except Exception as e: + console.print(f"[red]✗ Deployment failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def verify( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + wait: Annotated[bool, typer.Option("--wait", help="Wait for deployment to be ready")] = True, + timeout: Annotated[int, typer.Option("--timeout", help="Wait timeout in seconds")] = 300, +): + """Verify deployment health + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ + impl = ctx.obj["deployer"] + + try: + asyncio.run(impl.verify(config_file, wait=wait, timeout=timeout)) + console.print("[green]✓ Deployment healthy[/green]") + except Exception as e: + console.print(f"[red]✗ Verification failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def destroy( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + force: Annotated[bool, typer.Option("--force", help="Force destruction without confirmation")] = False, +): + """Destroy deployed MCP stack + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + force: Force destruction without confirmation + """ + impl = ctx.obj["deployer"] + + if not force: + if not typer.confirm("Are you sure you want to destroy the deployment?"): + console.print("[yellow]Aborted[/yellow]") + return + + try: + asyncio.run(impl.destroy(config_file)) + console.print("[green]✓ Deployment destroyed[/green]") + except Exception as e: + console.print(f"[red]✗ Destruction failed: {e}[/red]") + sys.exit(1) + + +@app.command() +def version(): + """Show version information""" + console.print( + Panel(f"[bold]MCP Deploy[/bold]\n" f"Version: 1.0.0\n" f"Mode: {IMPL_MODE}\n" f"Environment: {'container' if IN_CONTAINER else 'local'}\n", title="Version Info", border_style="blue") + ) + + +@app.command() +def generate( + ctx: typer.Context, + config_file: Annotated[Path, typer.Argument(help="The deployment configuration file")], + output: Annotated[Optional[Path], typer.Option("--output", "-o", help="Output directory for manifests")] = None, +): + """Generate deployment manifests (k8s or compose) + + Args: + ctx: Typer context object + config_file: Path to the deployment configuration file + output: Output directory for manifests + """ + impl = ctx.obj["deployer"] + + try: + manifests_dir = impl.generate_manifests(config_file, output_dir=output) + console.print(f"[green]✓ Manifests generated: {manifests_dir}[/green]") + except Exception as e: + console.print(f"[red]✗ Manifest generation failed: {e}[/red]") + sys.exit(1) + + +def main(): + """Main entry point + + Raises: + Exception: Any unhandled exception from subcommands (re-raised in debug mode) + """ + try: + app(obj={}) + except KeyboardInterrupt: + console.print("\n[yellow]Interrupted by user[/yellow]") + sys.exit(130) + except Exception as e: + console.print(f"[red]Fatal error: {e}[/red]") + if os.environ.get("MCP_DEBUG"): + raise + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/mcpgateway/tools/builder/common.py b/mcpgateway/tools/builder/common.py new file mode 100644 index 000000000..9601479cf --- /dev/null +++ b/mcpgateway/tools/builder/common.py @@ -0,0 +1,935 @@ +"""Location: ./mcpgateway/tools/builder/common.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Common utilities shared between Dagger and plain Python implementations. + +This module contains shared functionality to avoid code duplication between +the Dagger-based (dagger_module.py) and plain Python (plain_deploy.py) +implementations of the MCP Stack deployment system. + +Shared functions: +- load_config: Load and parse YAML configuration file +- generate_plugin_config: Generate plugins-config.yaml for gateway from mcp-stack.yaml +- generate_kubernetes_manifests: Generate Kubernetes deployment manifests +- generate_compose_manifests: Generate Docker Compose manifest +- copy_env_template: Copy .env.template from plugin repo to env.d/ directory +- handle_registry_operations: Tag and push images to container registry +- get_docker_compose_command: Detect available docker compose command +- run_compose: Run docker compose with error handling +- deploy_compose: Deploy using docker compose up -d +- verify_compose: Verify deployment with docker compose ps +- destroy_compose: Destroy deployment with docker compose down -v +- deploy_kubernetes: Deploy to Kubernetes using kubectl +- verify_kubernetes: Verify Kubernetes deployment health +- destroy_kubernetes: Destroy Kubernetes deployment with kubectl delete +""" + +# Standard +import base64 +import os +from pathlib import Path +import shutil +import subprocess +from typing import List + +# Third-Party +from jinja2 import Environment, FileSystemLoader +from rich.console import Console +import yaml + +# First-Party +from mcpgateway.tools.builder.schema import MCPStackConfig + +console = Console() + + +def get_deploy_dir() -> Path: + """Get deployment directory from environment variable or default. + + Checks MCP_DEPLOY_DIR environment variable, defaults to './deploy'. + + Returns: + Path to deployment directory + """ + deploy_dir = os.environ.get("MCP_DEPLOY_DIR", "./deploy") + return Path(deploy_dir) + + +def load_config(config_file: str) -> MCPStackConfig: + """Load and parse YAML configuration file into validated Pydantic model. + + Args: + config_file: Path to mcp-stack.yaml configuration file + + Returns: + Validated MCPStackConfig Pydantic model + + Raises: + FileNotFoundError: If configuration file doesn't exist + ValidationError: If configuration validation fails + """ + config_path = Path(config_file) + if not config_path.exists(): + raise FileNotFoundError(f"Configuration file not found: {config_file}") + + with open(config_path, encoding="utf-8") as f: + config_dict = yaml.safe_load(f) + + # Validate and return Pydantic model + return MCPStackConfig.model_validate(config_dict) + + +def generate_plugin_config(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> Path: + """Generate plugin config.yaml for gateway from mcp-stack.yaml. + + This function is shared between Dagger and plain Python implementations + to avoid code duplication. + + Args: + config: Validated MCPStackConfig Pydantic model + output_dir: Output directory for generated config + verbose: Print verbose output + + Returns: + Path to generated plugins-config.yaml file + + Raises: + FileNotFoundError: If template directory not found + """ + + deployment_type = config.deployment.type + plugins = config.plugins + + # Load template + template_dir = Path(__file__).parent / "templates" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + # YAML files should not use HTML autoescape + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=False) # nosec B701 + template = env.get_template("plugins-config.yaml.j2") + + # Prepare plugin data with computed URLs + plugin_data = [] + for plugin in plugins: + plugin_name = plugin.name + port = plugin.port or 8000 + + # Determine URL based on deployment type + if deployment_type == "compose": + # Use container hostname (lowercase) + hostname = plugin_name.lower() + # Use HTTPS if mTLS is enabled + protocol = "https" if plugin.mtls_enabled else "http" + url = f"{protocol}://{hostname}:{port}/mcp" + else: # kubernetes + # Use Kubernetes service DNS + namespace = config.deployment.namespace or "mcp-gateway" + service_name = f"mcp-plugin-{plugin_name.lower()}" + protocol = "https" if plugin.mtls_enabled else "http" + url = f"{protocol}://{service_name}.{namespace}.svc:{port}/mcp" + + # Build plugin entry with computed URL + plugin_entry = { + "name": plugin_name, + "port": port, + "url": url, + } + + # Merge plugin_overrides (client-side config only, excludes 'config') + # Allowed client-side fields that plugin manager uses + if plugin.plugin_overrides: + overrides = plugin.plugin_overrides + allowed_fields = ["priority", "mode", "description", "version", "author", "hooks", "tags", "conditions"] + for field in allowed_fields: + if field in overrides: + plugin_entry[field] = overrides[field] + + plugin_data.append(plugin_entry) + + # Render template + rendered = template.render(plugins=plugin_data) + + # Write config file + config_path = output_dir / "plugins-config.yaml" + config_path.write_text(rendered) + + if verbose: + print(f"✓ Plugin config generated: {config_path}") + + return config_path + + +def generate_kubernetes_manifests(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: + """Generate Kubernetes manifests from configuration. + + Args: + config: Validated MCPStackConfig Pydantic model + output_dir: Output directory for manifests + verbose: Print verbose output + + Raises: + FileNotFoundError: If template directory not found + """ + + # Load templates + template_dir = Path(__file__).parent / "templates" / "kubernetes" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + # Auto-detect and assign env files if not specified + _auto_detect_env_files(config, output_dir, verbose=verbose) + + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 + + # Generate namespace + namespace = config.deployment.namespace or "mcp-gateway" + + # Generate mTLS certificate resources if enabled + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False + + if gateway_mtls: + if use_cert_manager: + # Generate cert-manager Certificate CRDs + cert_manager_template = env.get_template("cert-manager-certificates.yaml.j2") + + # Calculate duration and renewBefore in hours + validity_days = cert_config.validity_days or 825 + duration_hours = validity_days * 24 + # Renew at 2/3 of lifetime (cert-manager default) + renew_before_hours = int(duration_hours * 2 / 3) + + # Prepare certificate data + cert_data = { + "namespace": namespace, + "gateway_name": "mcpgateway", + "issuer_name": cert_config.cert_manager_issuer or "mcp-ca-issuer", + "issuer_kind": cert_config.cert_manager_kind or "Issuer", + "duration": duration_hours, + "renew_before": renew_before_hours, + "plugins": [], + } + + # Add plugins with mTLS enabled + for plugin in config.plugins: + if plugin.mtls_enabled if plugin.mtls_enabled is not None else True: + cert_data["plugins"].append({"name": f"mcp-plugin-{plugin.name.lower()}"}) + + # Generate cert-manager certificates manifest + cert_manager_manifest = cert_manager_template.render(**cert_data) + (output_dir / "cert-manager-certificates.yaml").write_text(cert_manager_manifest) + if verbose: + print(" ✓ cert-manager Certificate CRDs manifest generated") + + else: + # Generate traditional certificate secrets (backward compatibility) + cert_secrets_template = env.get_template("cert-secrets.yaml.j2") + + # Prepare certificate data + cert_data = {"namespace": namespace, "gateway_name": "mcpgateway", "plugins": []} + + # Read and encode CA certificate + ca_cert_path = Path("certs/mcp/ca/ca.crt") + if ca_cert_path.exists(): + cert_data["ca_cert_b64"] = base64.b64encode(ca_cert_path.read_bytes()).decode("utf-8") + else: + if verbose: + print(f"[yellow]Warning: CA certificate not found at {ca_cert_path}[/yellow]") + + # Read and encode gateway certificates + gateway_cert_path = Path("certs/mcp/gateway/client.crt") + gateway_key_path = Path("certs/mcp/gateway/client.key") + if gateway_cert_path.exists() and gateway_key_path.exists(): + cert_data["gateway_cert_b64"] = base64.b64encode(gateway_cert_path.read_bytes()).decode("utf-8") + cert_data["gateway_key_b64"] = base64.b64encode(gateway_key_path.read_bytes()).decode("utf-8") + else: + if verbose: + print("[yellow]Warning: Gateway certificates not found[/yellow]") + + # Read and encode plugin certificates + for plugin in config.plugins: + if plugin.mtls_enabled if plugin.mtls_enabled is not None else True: + plugin_name = plugin.name + plugin_cert_path = Path(f"certs/mcp/plugins/{plugin_name}/server.crt") + plugin_key_path = Path(f"certs/mcp/plugins/{plugin_name}/server.key") + + if plugin_cert_path.exists() and plugin_key_path.exists(): + cert_data["plugins"].append( + { + "name": f"mcp-plugin-{plugin_name.lower()}", + "cert_b64": base64.b64encode(plugin_cert_path.read_bytes()).decode("utf-8"), + "key_b64": base64.b64encode(plugin_key_path.read_bytes()).decode("utf-8"), + } + ) + else: + if verbose: + print(f"[yellow]Warning: Plugin {plugin_name} certificates not found[/yellow]") + + # Generate certificate secrets manifest + if "ca_cert_b64" in cert_data: + cert_secrets_manifest = cert_secrets_template.render(**cert_data) + (output_dir / "cert-secrets.yaml").write_text(cert_secrets_manifest) + if verbose: + print(" ✓ mTLS certificate secrets manifest generated") + + # Generate infrastructure manifests (postgres, redis) if enabled + infrastructure = config.infrastructure + + # PostgreSQL + if infrastructure and infrastructure.postgres and infrastructure.postgres.enabled: + postgres_config = infrastructure.postgres + postgres_template = env.get_template("postgres.yaml.j2") + postgres_manifest = postgres_template.render( + namespace=namespace, + image=postgres_config.image or "quay.io/sclorg/postgresql-15-c9s:latest", + database=postgres_config.database or "mcp", + user=postgres_config.user or "postgres", + password=postgres_config.password or "mysecretpassword", + storage_size=postgres_config.storage_size or "10Gi", + storage_class=postgres_config.storage_class, + ) + (output_dir / "postgres-deployment.yaml").write_text(postgres_manifest) + if verbose: + print(" ✓ PostgreSQL deployment manifest generated") + + # Redis + if infrastructure and infrastructure.redis and infrastructure.redis.enabled: + redis_config = infrastructure.redis + redis_template = env.get_template("redis.yaml.j2") + redis_manifest = redis_template.render(namespace=namespace, image=redis_config.image or "redis:latest") + (output_dir / "redis-deployment.yaml").write_text(redis_manifest) + if verbose: + print(" ✓ Redis deployment manifest generated") + + # Generate plugins ConfigMap if plugins are configured + if config.plugins and len(config.plugins) > 0: + configmap_template = env.get_template("plugins-configmap.yaml.j2") + # Read the generated plugins-config.yaml file + plugins_config_path = output_dir / "plugins-config.yaml" + if plugins_config_path.exists(): + plugins_config_content = plugins_config_path.read_text() + configmap_manifest = configmap_template.render(namespace=namespace, plugins_config=plugins_config_content) + (output_dir / "plugins-configmap.yaml").write_text(configmap_manifest) + if verbose: + print(" ✓ Plugins ConfigMap manifest generated") + + # Generate gateway deployment + gateway_template = env.get_template("deployment.yaml.j2") + # Convert Pydantic model to dict for template rendering + gateway_dict = config.gateway.model_dump(exclude_none=True) + gateway_dict["name"] = "mcpgateway" + gateway_dict["namespace"] = namespace + gateway_dict["has_plugins"] = config.plugins and len(config.plugins) > 0 + + # Update image to use full registry path if registry is enabled + if config.gateway.registry and config.gateway.registry.enabled: + base_image_name = config.gateway.image.split(":")[0].split("/")[-1] + image_version = config.gateway.image.split(":")[-1] if ":" in config.gateway.image else "latest" + gateway_dict["image"] = f"{config.gateway.registry.url}/{config.gateway.registry.namespace}/{base_image_name}:{image_version}" + # Set imagePullPolicy from registry config + if config.gateway.registry.image_pull_policy: + gateway_dict["image_pull_policy"] = config.gateway.registry.image_pull_policy + + # Add DATABASE_URL and REDIS_URL to gateway environment if infrastructure is enabled + if "env_vars" not in gateway_dict: + gateway_dict["env_vars"] = {} + + # Enable plugins if any are configured + if config.plugins and len(config.plugins) > 0: + gateway_dict["env_vars"]["PLUGINS_ENABLED"] = "true" + gateway_dict["env_vars"]["PLUGIN_CONFIG_FILE"] = "/app/config/plugins.yaml" + + # Add init containers to wait for infrastructure services + init_containers = [] + + if infrastructure and infrastructure.postgres and infrastructure.postgres.enabled: + postgres = infrastructure.postgres + db_user = postgres.user or "postgres" + db_password = postgres.password or "mysecretpassword" + db_name = postgres.database or "mcp" + gateway_dict["env_vars"]["DATABASE_URL"] = f"postgresql://{db_user}:{db_password}@postgres:5432/{db_name}" + + # Add init container to wait for PostgreSQL + init_containers.append({"name": "wait-for-postgres", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z postgres 5432; do echo waiting for postgres; sleep 2; done"]}) + + if infrastructure and infrastructure.redis and infrastructure.redis.enabled: + gateway_dict["env_vars"]["REDIS_URL"] = "redis://redis:6379/0" + + # Add init container to wait for Redis + init_containers.append({"name": "wait-for-redis", "image": "busybox:1.36", "command": ["sh", "-c", "until nc -z redis 6379; do echo waiting for redis; sleep 2; done"]}) + + # Add init containers to wait for plugins to be ready + if config.plugins and len(config.plugins) > 0: + for plugin in config.plugins: + plugin_service_name = f"mcp-plugin-{plugin.name.lower()}" + plugin_port = plugin.port or 8000 + # Wait for plugin service to be available + init_containers.append( + { + "name": f"wait-for-{plugin.name.lower()}", + "image": "busybox:1.36", + "command": ["sh", "-c", f"until nc -z {plugin_service_name} {plugin_port}; do echo waiting for {plugin_service_name}; sleep 2; done"], + } + ) + + if init_containers: + gateway_dict["init_containers"] = init_containers + + gateway_manifest = gateway_template.render(**gateway_dict) + (output_dir / "gateway-deployment.yaml").write_text(gateway_manifest) + + # Generate OpenShift Route if configured + if config.deployment.openshift and config.deployment.openshift.create_routes: + route_template = env.get_template("route.yaml.j2") + openshift_config = config.deployment.openshift + + # Auto-detect OpenShift apps domain if not specified + openshift_domain = openshift_config.domain + if not openshift_domain: + try: + # Try to get domain from OpenShift cluster info + result = subprocess.run(["kubectl", "get", "ingresses.config.openshift.io", "cluster", "-o", "jsonpath={.spec.domain}"], capture_output=True, text=True, check=False) + if result.returncode == 0 and result.stdout.strip(): + openshift_domain = result.stdout.strip() + if verbose: + console.print(f"[dim]Auto-detected OpenShift domain: {openshift_domain}[/dim]") + else: + # Fallback to common OpenShift Local domain + openshift_domain = "apps-crc.testing" + if verbose: + console.print(f"[yellow]Could not auto-detect OpenShift domain, using default: {openshift_domain}[/yellow]") + except Exception: + # Fallback to common OpenShift Local domain + openshift_domain = "apps-crc.testing" + if verbose: + console.print(f"[yellow]Could not auto-detect OpenShift domain, using default: {openshift_domain}[/yellow]") + + route_manifest = route_template.render(namespace=namespace, openshift_domain=openshift_domain, tls_termination=openshift_config.tls_termination) + (output_dir / "gateway-route.yaml").write_text(route_manifest) + if verbose: + print(" ✓ OpenShift Route manifest generated") + + # Generate plugin deployments + for plugin in config.plugins: + # Convert Pydantic model to dict for template rendering + plugin_dict = plugin.model_dump(exclude_none=True) + plugin_dict["name"] = f"mcp-plugin-{plugin.name.lower()}" + plugin_dict["namespace"] = namespace + + # Update image to use full registry path if registry is enabled + if plugin.registry and plugin.registry.enabled: + base_image_name = plugin.image.split(":")[0].split("/")[-1] + image_version = plugin.image.split(":")[-1] if ":" in plugin.image else "latest" + plugin_dict["image"] = f"{plugin.registry.url}/{plugin.registry.namespace}/{base_image_name}:{image_version}" + # Set imagePullPolicy from registry config + if plugin.registry.image_pull_policy: + plugin_dict["image_pull_policy"] = plugin.registry.image_pull_policy + + plugin_manifest = gateway_template.render(**plugin_dict) + (output_dir / f"plugin-{plugin.name.lower()}-deployment.yaml").write_text(plugin_manifest) + + if verbose: + print(f"✓ Kubernetes manifests generated in {output_dir}") + + +def generate_compose_manifests(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: + """Generate Docker Compose manifest from configuration. + + Args: + config: Validated MCPStackConfig Pydantic model + output_dir: Output directory for manifests + verbose: Print verbose output + + Raises: + FileNotFoundError: If template directory not found + """ + + # Load templates + template_dir = Path(__file__).parent / "templates" / "compose" + if not template_dir.exists(): + raise FileNotFoundError(f"Template directory not found: {template_dir}") + + # Auto-detect and assign env files if not specified + _auto_detect_env_files(config, output_dir, verbose=verbose) + + # Auto-assign host_ports if expose_port is true but host_port not specified + next_host_port = 8000 + for plugin in config.plugins: + # Port defaults are handled by Pydantic defaults in schema + + # Auto-assign host_port if expose_port is true + if plugin.expose_port and not plugin.host_port: + plugin.host_port = next_host_port # type: ignore + next_host_port += 1 + + # Compute relative certificate paths (from output_dir to project root certs/) + # Certificates are at: ./certs/mcp/... + # Output dir is at: ./deploy/manifests/ + # So relative path is: ../../certs/mcp/... + certs_base = Path.cwd() / "certs" + certs_rel_base = os.path.relpath(certs_base, output_dir) + + # Add computed cert paths to context for template + cert_paths = { + "certs_base": certs_rel_base, + "gateway_cert_dir": os.path.join(certs_rel_base, "mcp/gateway"), + "ca_cert_file": os.path.join(certs_rel_base, "mcp/ca/ca.crt"), + "plugins_cert_base": os.path.join(certs_rel_base, "mcp/plugins"), + } + + env = Environment(loader=FileSystemLoader(str(template_dir)), autoescape=True) # nosec B701 + + # Generate compose file + compose_template = env.get_template("docker-compose.yaml.j2") + # Convert Pydantic model to dict for template rendering + config_dict = config.model_dump(exclude_none=True) + compose_manifest = compose_template.render(**config_dict, cert_paths=cert_paths) + (output_dir / "docker-compose.yaml").write_text(compose_manifest) + + if verbose: + print(f"✓ Compose manifest generated in {output_dir}") + + +def _auto_detect_env_files(config: MCPStackConfig, output_dir: Path, verbose: bool = False) -> None: + """Auto-detect and assign env files if not explicitly specified. + + If env_file is not specified in the config, check if {deploy_dir}/env/.env.{name} + exists and use it. Warn the user when auto-detection is used. + + Args: + config: MCPStackConfig Pydantic model (modified in-place via attribute assignment) + output_dir: Output directory where manifests will be generated (for relative paths) + verbose: Print verbose output + """ + deploy_dir = get_deploy_dir() + env_dir = deploy_dir / "env" + + # Check gateway - since we need to modify the model, we access env_file directly + # Note: Pydantic models allow attribute assignment after creation + if not hasattr(config.gateway, "env_file") or not config.gateway.env_file: + gateway_env = env_dir / ".env.gateway" + if gateway_env.exists(): + # Make path relative to output_dir (where docker-compose.yaml will be) + relative_path = os.path.relpath(gateway_env, output_dir) + config.gateway.env_file = relative_path # type: ignore + print(f"⚠ Auto-detected env file: {gateway_env}") + if verbose: + print(" (Gateway env_file not specified in config)") + + # Check plugins + for plugin in config.plugins: + plugin_name = plugin.name + if not hasattr(plugin, "env_file") or not plugin.env_file: + plugin_env = env_dir / f".env.{plugin_name}" + if plugin_env.exists(): + # Make path relative to output_dir (where docker-compose.yaml will be) + relative_path = os.path.relpath(plugin_env, output_dir) + plugin.env_file = relative_path # type: ignore + print(f"⚠ Auto-detected env file: {plugin_env}") + if verbose: + print(f" (Plugin {plugin_name} env_file not specified in config)") + + +def copy_env_template(plugin_name: str, plugin_build_dir: Path, verbose: bool = False) -> None: + """Copy .env.template from plugin repo to {deploy_dir}/env/ directory. + + Uses MCP_DEPLOY_DIR environment variable if set, defaults to './deploy'. + This function is shared between Dagger and plain Python implementations. + + Args: + plugin_name: Name of the plugin + plugin_build_dir: Path to plugin build directory (contains .env.template) + verbose: Print verbose output + """ + # Create {deploy_dir}/env directory if it doesn't exist + deploy_dir = get_deploy_dir() + env_dir = deploy_dir / "env" + env_dir.mkdir(parents=True, exist_ok=True) + + # Look for .env.template in plugin build directory + template_file = plugin_build_dir / ".env.template" + if not template_file.exists(): + if verbose: + print(f"No .env.template found in {plugin_name}") + return + + # Target file path + target_file = env_dir / f".env.{plugin_name}" + + # Only copy if target doesn't exist (don't overwrite user edits) + if target_file.exists(): + if verbose: + print(f"⚠ {target_file} already exists, skipping") + return + + # Copy template + shutil.copy2(template_file, target_file) + if verbose: + print(f"✓ Copied .env.template -> {target_file}") + + +def handle_registry_operations(component, component_name: str, image_tag: str, container_runtime: str, verbose: bool = False) -> str: + """Handle registry tagging and pushing for a built component. + + This function is shared between Dagger and plain Python implementations. + It tags the locally built image with the registry path and optionally pushes it. + + Args: + component: BuildableConfig component (GatewayConfig or PluginConfig) + component_name: Name of the component (gateway or plugin name) + image_tag: Current local image tag + container_runtime: Container runtime to use ("docker" or "podman") + verbose: Print verbose output + + Returns: + Final image tag (registry path if registry enabled, otherwise original tag) + + Raises: + ValueError: If registry enabled but missing required configuration + subprocess.CalledProcessError: If tag or push command fails + """ + # First-Party + from mcpgateway.tools.builder.schema import BuildableConfig + + # Type check for better error messages + if not isinstance(component, BuildableConfig): + raise TypeError(f"Component must be a BuildableConfig instance, got {type(component)}") + + # Check if registry is enabled + if not component.registry or not component.registry.enabled: + return image_tag + + registry_config = component.registry + + # Validate registry configuration + if not registry_config.url or not registry_config.namespace: + raise ValueError(f"Registry enabled for {component_name} but missing 'url' or 'namespace' configuration") + + # Construct registry image path + # Format: {registry_url}/{namespace}/{image_name}:{tag} + base_image_name = image_tag.split(":")[0].split("/")[-1] # Extract base name (e.g., "mcpgateway-gateway") + image_version = image_tag.split(":")[-1] if ":" in image_tag else "latest" # Extract tag + registry_image = f"{registry_config.url}/{registry_config.namespace}/{base_image_name}:{image_version}" + + # Tag image for registry + if verbose: + console.print(f"[dim]Tagging {image_tag} as {registry_image}[/dim]") + tag_cmd = [container_runtime, "tag", image_tag, registry_image] + result = subprocess.run(tag_cmd, capture_output=True, text=True, check=True) + if result.stdout and verbose: + console.print(result.stdout) + + # Push to registry if enabled + if registry_config.push: + if verbose: + console.print(f"[blue]Pushing {registry_image} to registry...[/blue]") + + # Build push command with TLS options + push_cmd = [container_runtime, "push"] + + # For podman, add --tls-verify=false for registries with self-signed certs + # This is common for OpenShift internal registries and local development + if container_runtime == "podman": + push_cmd.append("--tls-verify=false") + + push_cmd.append(registry_image) + + try: + result = subprocess.run(push_cmd, capture_output=True, text=True, check=True) + if result.stdout and verbose: + console.print(result.stdout) + console.print(f"[green]✓ Pushed to registry: {registry_image}[/green]") + except subprocess.CalledProcessError as e: + console.print(f"[red]✗ Failed to push to registry: {e}[/red]") + if e.stderr: + console.print(f"[red]Error output: {e.stderr}[/red]") + console.print("[yellow]Tip: Authenticate to the registry first:[/yellow]") + console.print(f" {container_runtime} login {registry_config.url}") + raise + + # Update component image reference to use registry path for manifests + component.image = registry_image + + return registry_image + + +# Docker Compose Utilities + + +def get_docker_compose_command() -> List[str]: + """Detect and return available docker compose command. + + Tries to detect docker compose plugin first, then falls back to + standalone docker-compose command. + + Returns: + Command to use: ["docker", "compose"] or ["docker-compose"] + + Raises: + RuntimeError: If neither command is available + """ + # Try docker compose (new plugin) first + if shutil.which("docker"): + try: + subprocess.run(["docker", "compose", "version"], capture_output=True, check=True) + return ["docker", "compose"] + except (subprocess.CalledProcessError, FileNotFoundError): + pass + + # Fall back to standalone docker-compose + if shutil.which("docker-compose"): + return ["docker-compose"] + + raise RuntimeError("Docker Compose not found. Install docker compose plugin or docker-compose.") + + +def run_compose(compose_file: Path, args: List[str], verbose: bool = False, check: bool = True) -> subprocess.CompletedProcess: + """Run docker compose command with given arguments. + + Args: + compose_file: Path to docker-compose.yaml + args: Arguments to pass to compose (e.g., ["up", "-d"]) + verbose: Print verbose output + check: Raise exception on non-zero exit code + + Returns: + CompletedProcess instance + + Raises: + FileNotFoundError: If compose_file doesn't exist + RuntimeError: If docker compose command fails (when check=True) + """ + if not compose_file.exists(): + raise FileNotFoundError(f"Compose file not found: {compose_file}") + + compose_cmd = get_docker_compose_command() + full_cmd = compose_cmd + ["-f", str(compose_file)] + args + + if verbose: + console.print(f"[dim]Running: {' '.join(full_cmd)}[/dim]") + + try: + result = subprocess.run(full_cmd, capture_output=True, text=True, check=check) + return result + except subprocess.CalledProcessError as e: + console.print("\n[red bold]Docker Compose command failed:[/red bold]") + if e.stdout: + console.print(f"[yellow]Output:[/yellow]\n{e.stdout}") + if e.stderr: + console.print(f"[red]Error:[/red]\n{e.stderr}") + raise RuntimeError(f"Docker Compose failed with exit code {e.returncode}") from e + + +def deploy_compose(compose_file: Path, verbose: bool = False) -> None: + """Deploy using docker compose up -d. + + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Raises: + RuntimeError: If deployment fails + """ + result = run_compose(compose_file, ["up", "-d"], verbose=verbose) + if result.stdout and verbose: + console.print(result.stdout) + console.print("[green]✓ Deployed with Docker Compose[/green]") + + +def verify_compose(compose_file: Path, verbose: bool = False) -> str: + """Verify Docker Compose deployment with ps command. + + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Returns: + Output from docker compose ps command + """ + result = run_compose(compose_file, ["ps"], verbose=verbose, check=False) + return result.stdout + + +def destroy_compose(compose_file: Path, verbose: bool = False) -> None: + """Destroy Docker Compose deployment with down -v. + + Args: + compose_file: Path to docker-compose.yaml + verbose: Print verbose output + + Raises: + RuntimeError: If destruction fails + """ + if not compose_file.exists(): + console.print(f"[yellow]Compose file not found: {compose_file}[/yellow]") + console.print("[yellow]Nothing to destroy[/yellow]") + return + + result = run_compose(compose_file, ["down", "-v"], verbose=verbose) + if result.stdout and verbose: + console.print(result.stdout) + console.print("[green]✓ Destroyed Docker Compose deployment[/green]") + + +# Kubernetes kubectl utilities + + +def deploy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: + """Deploy to Kubernetes using kubectl. + + Applies manifests in correct order: + 1. Deployments (creates namespaces) + 2. Certificate resources (secrets or cert-manager CRDs) + 3. ConfigMaps (plugins configuration) + 4. Infrastructure (PostgreSQL, Redis) + 5. OpenShift Routes (if configured) + + Excludes plugins-config.yaml (not a Kubernetes resource). + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + verbose: Print verbose output + + Raises: + RuntimeError: If kubectl not found or deployment fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. Cannot deploy to Kubernetes.") + + # Get all manifest files, excluding plugins-config.yaml (not a Kubernetes resource) + all_manifests = sorted(manifests_dir.glob("*.yaml")) + all_manifests = [m for m in all_manifests if m.name != "plugins-config.yaml"] + + # Identify different types of manifests + cert_secrets = manifests_dir / "cert-secrets.yaml" + cert_manager_certs = manifests_dir / "cert-manager-certificates.yaml" + postgres_deploy = manifests_dir / "postgres-deployment.yaml" + redis_deploy = manifests_dir / "redis-deployment.yaml" + plugins_configmap = manifests_dir / "plugins-configmap.yaml" + + # 1. Apply all deployments first (creates namespaces) + deployment_files = [m for m in all_manifests if m.name.endswith("-deployment.yaml") and m not in [cert_secrets, postgres_deploy, redis_deploy]] + + # Apply deployment files (this creates the namespace) + for manifest in deployment_files: + result = subprocess.run(["kubectl", "apply", "-f", str(manifest)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 2. Apply certificate resources (now namespace exists) + # Check for both cert-secrets.yaml (local mode) and cert-manager-certificates.yaml (cert-manager mode) + if cert_manager_certs.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(cert_manager_certs)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + elif cert_secrets.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(cert_secrets)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 3. Apply ConfigMaps (needed by deployments) + if plugins_configmap.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(plugins_configmap)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 4. Apply infrastructure + for infra_file in [postgres_deploy, redis_deploy]: + if infra_file.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(infra_file)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl apply failed: {result.stderr}") + + # 5. Apply OpenShift Routes (if configured) + gateway_route = manifests_dir / "gateway-route.yaml" + if gateway_route.exists(): + result = subprocess.run(["kubectl", "apply", "-f", str(gateway_route)], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + # Don't fail on Route errors (may not be on OpenShift) + if verbose: + console.print(f"[yellow]Warning: Could not apply Route (may not be on OpenShift): {result.stderr}[/yellow]") + + console.print("[green]✓ Deployed to Kubernetes[/green]") + + +def verify_kubernetes(namespace: str, wait: bool = False, timeout: int = 300, verbose: bool = False) -> str: + """Verify Kubernetes deployment health. + + Args: + namespace: Kubernetes namespace to check + wait: Wait for pods to be ready + timeout: Wait timeout in seconds + verbose: Print verbose output + + Returns: + String output from kubectl get pods + + Raises: + RuntimeError: If kubectl not found or verification fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. Cannot verify Kubernetes deployment.") + + # Get pod status + result = subprocess.run(["kubectl", "get", "pods", "-n", namespace], capture_output=True, text=True, check=False) + output = result.stdout if result.stdout else "" + if result.returncode != 0: + raise RuntimeError(f"kubectl get pods failed: {result.stderr}") + + # Wait for pods if requested + if wait: + result = subprocess.run(["kubectl", "wait", "--for=condition=Ready", "pod", "--all", "-n", namespace, f"--timeout={timeout}s"], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0: + raise RuntimeError(f"kubectl wait failed: {result.stderr}") + + return output + + +def destroy_kubernetes(manifests_dir: Path, verbose: bool = False) -> None: + """Destroy Kubernetes deployment. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + verbose: Print verbose output + + Raises: + RuntimeError: If kubectl not found or destruction fails + """ + if not shutil.which("kubectl"): + raise RuntimeError("kubectl not found. Cannot destroy Kubernetes deployment.") + + if not manifests_dir.exists(): + console.print(f"[yellow]Manifests directory not found: {manifests_dir}[/yellow]") + console.print("[yellow]Nothing to destroy[/yellow]") + return + + # Delete all manifests except plugins-config.yaml + all_manifests = sorted(manifests_dir.glob("*.yaml")) + all_manifests = [m for m in all_manifests if m.name != "plugins-config.yaml"] + + for manifest in all_manifests: + result = subprocess.run(["kubectl", "delete", "-f", str(manifest), "--ignore-not-found=true"], capture_output=True, text=True, check=False) + if result.stdout and verbose: + console.print(result.stdout) + if result.returncode != 0 and "NotFound" not in result.stderr: + console.print(f"[yellow]Warning: {result.stderr}[/yellow]") + + console.print("[green]✓ Destroyed Kubernetes deployment[/green]") diff --git a/mcpgateway/tools/builder/dagger_deploy.py b/mcpgateway/tools/builder/dagger_deploy.py new file mode 100644 index 000000000..d5ece26d5 --- /dev/null +++ b/mcpgateway/tools/builder/dagger_deploy.py @@ -0,0 +1,556 @@ +"""Location: ./mcpgateway/tools/builder/dagger_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Dagger-based MCP Stack Deployment Module + +This module provides optimized build and deployment using Dagger. + +Features: +- Automatic caching and parallelization +- Content-addressable storage +- Efficient multi-stage builds +- Built-in layer caching +""" + +# Standard +from pathlib import Path +from typing import List, Optional + +try: + # Third-Party + import dagger + from dagger import dag + + DAGGER_AVAILABLE = True +except ImportError: + DAGGER_AVAILABLE = False + dagger = None # type: ignore + dag = None # type: ignore + +# Third-Party +from rich.console import Console +from rich.progress import Progress, SpinnerColumn, TextColumn + +# First-Party +from mcpgateway.tools.builder.common import ( + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + handle_registry_operations, + load_config, + verify_compose, + verify_kubernetes, +) +from mcpgateway.tools.builder.common import copy_env_template as copy_template +from mcpgateway.tools.builder.pipeline import CICDModule +from mcpgateway.tools.builder.schema import BuildableConfig, MCPStackConfig + +console = Console() + + +class MCPStackDagger(CICDModule): + """Dagger-based implementation of MCP Stack deployment.""" + + def __init__(self, verbose: bool = False): + """Initialize MCPStackDagger instance. + + Args: + verbose: Enable verbose output + + Raises: + ImportError: If dagger is not installed + """ + if not DAGGER_AVAILABLE: + raise ImportError("Dagger is not installed. Install with: pip install dagger-io\n" "Alternatively, use the plain Python deployer with --deployer=python") + super().__init__(verbose) + + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build gateway and plugin containers using Dagger. + + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build + no_cache: Disable Dagger cache + copy_env_templates: Copy .env.template files from cloned repos + + Raises: + Exception: If build fails for any component + """ + config = load_config(config_file) + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + # Build gateway (unless plugins_only=True) + if not plugins_only: + gateway = config.gateway + if gateway.repo: + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + task = progress.add_task("Building gateway...", total=None) + try: + await self._build_component_with_dagger(gateway, "gateway", no_cache=no_cache) + progress.update(task, completed=1, description="[green]✓ Built gateway[/green]") + except Exception as e: + progress.update(task, completed=1, description="[red]✗ Failed gateway[/red]") + # Print full error after progress bar closes + self.console.print("\n[red bold]Gateway build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + elif self.verbose: + self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") + + # Build plugins + plugins = config.plugins + + if specific_plugins: + plugins = [p for p in plugins if p.name in specific_plugins] + + if not plugins: + self.console.print("[yellow]No plugins to build[/yellow]") + return + + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + + for plugin in plugins: + plugin_name = plugin.name + + # Skip if pre-built image specified + if plugin.image and not plugin.repo: + task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) + progress.update(task, completed=1) + continue + + task = progress.add_task(f"Building {plugin_name}...", total=None) + + try: + await self._build_component_with_dagger(plugin, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) + progress.update(task, completed=1, description=f"[green]✓ Built {plugin_name}[/green]") + except Exception as e: + progress.update(task, completed=1, description=f"[red]✗ Failed {plugin_name}[/red]") + # Print full error after progress bar closes + self.console.print(f"\n[red bold]Plugin '{plugin_name}' build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for plugins. + + Supports two modes: + 1. Local generation (use_cert_manager=false): Uses Dagger to generate certificates locally + 2. cert-manager (use_cert_manager=true): Skips local generation, cert-manager will create certificates + + Args: + config_file: Path to mcp-stack.yaml + + Raises: + dagger.ExecError: If certificate generation command fails (when using local generation) + dagger.QueryError: If Dagger query fails (when using local generation) + """ + config = load_config(config_file) + + # Check if using cert-manager + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False + validity_days = cert_config.validity_days if cert_config else 825 + + if use_cert_manager: + # Skip local generation - cert-manager will handle certificate creation + if self.verbose: + self.console.print("[blue]Using cert-manager for certificate management[/blue]") + self.console.print("[dim]Skipping local certificate generation (cert-manager will create certificates)[/dim]") + return + + # Local certificate generation (backward compatibility) + if self.verbose: + self.console.print("[blue]Generating mTLS certificates locally...[/blue]") + + # Use Dagger container to run certificate generation + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + # Mount current directory + source = dag.host().directory(".") + try: + # Use Alpine with openssl + container = ( + dag.container() + .from_("alpine:latest") + .with_exec(["apk", "add", "--no-cache", "openssl", "python3", "py3-pip", "make", "bash"]) + .with_mounted_directory("/workspace", source) + .with_workdir("/workspace") + # .with_exec(["python3", "-m", "venv", ".venv"]) + # .with_exec(["sh", "-c", "source .venv/bin/activate && pip install pyyaml"]) + # .with_exec(["pip", "install", "pyyaml"]) + ) + + # Generate CA + container = container.with_exec(["sh", "-c", f"make certs-mcp-ca MCP_CERT_DAYS={validity_days}"]) + + # Generate gateway cert + container = container.with_exec(["sh", "-c", f"make certs-mcp-gateway MCP_CERT_DAYS={validity_days}"]) + + # Generate plugin certificates + plugins = config.plugins + for plugin in plugins: + plugin_name = plugin.name + container = container.with_exec(["sh", "-c", f"make certs-mcp-plugin PLUGIN_NAME={plugin_name} MCP_CERT_DAYS={validity_days}"]) + + # Export certificates back to host + output = container.directory("/workspace/certs") + await output.export("./certs") + except dagger.ExecError as e: + self.console.print(f"Dagger Exec Error: {e.message}") + self.console.print(f"Exit Code: {e.exit_code}") + self.console.print(f"Stderr: {e.stderr}") + raise + except dagger.QueryError as e: + self.console.print(f"Dagger Query Error: {e.errors}") + self.console.print(f"Debug Query: {e.debug_query()}") + raise + except Exception as e: + self.console.print(f"An unexpected error occurred: {e}") + raise + + if self.verbose: + self.console.print("[green]✓ Certificates generated locally[/green]") + + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: + """Deploy MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + output_dir: Output directory for manifests (default: ./deploy) + + Raises: + ValueError: If unsupported deployment type specified + dagger.ExecError: If deployment command fails + dagger.QueryError: If Dagger query fails + """ + config = load_config(config_file) + + # Build containers + if not skip_build: + await self.build(config_file) + + # Generate certificates (only if mTLS is enabled) + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + plugin_mtls = any((p.mtls_enabled if p.mtls_enabled is not None else True) for p in config.plugins) + mtls_needed = gateway_mtls or plugin_mtls + + if not skip_certs and mtls_needed: + await self.generate_certificates(config_file) + elif not skip_certs and not mtls_needed: + if self.verbose: + self.console.print("[dim]Skipping certificate generation (mTLS disabled)[/dim]") + + # Generate manifests + manifests_dir = self.generate_manifests(config_file, output_dir=output_dir) + + if dry_run: + self.console.print(f"[yellow]Dry-run: Manifests generated in {manifests_dir}[/yellow]") + return + + # Apply deployment + deployment_type = config.deployment.type + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + try: + if deployment_type == "kubernetes": + await self._deploy_kubernetes(manifests_dir) + elif deployment_type == "compose": + await self._deploy_compose(manifests_dir) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + except dagger.ExecError as e: + self.console.print(f"Dagger Exec Error: {e.message}") + self.console.print(f"Exit Code: {e.exit_code}") + self.console.print(f"Stderr: {e.stderr}") + raise + except dagger.QueryError as e: + self.console.print(f"Dagger Query Error: {e.errors}") + self.console.print(f"Debug Query: {e.debug_query()}") + raise + except Exception as e: + # Extract detailed error from Dagger exception + error_msg = str(e) + self.console.print("\n[red bold]Deployment failed:[/red bold]") + self.console.print(f"[red]{error_msg}[/red]") + + # Check if it's a compose-specific error and try to provide more context + if "compose" in error_msg.lower() and self.verbose: + self.console.print("\n[yellow]Hint:[/yellow] Check the generated docker-compose.yaml:") + self.console.print(f"[dim] {manifests_dir}/docker-compose.yaml[/dim]") + self.console.print("[yellow]Try running manually:[/yellow]") + self.console.print(f"[dim] cd {manifests_dir} && docker compose up[/dim]") + + raise + + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health. + + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if self.verbose: + self.console.print("[blue]Verifying deployment...[/blue]") + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + if deployment_type == "kubernetes": + await self._verify_kubernetes(config, wait=wait, timeout=timeout) + elif deployment_type == "compose": + await self._verify_compose(config, wait=wait, timeout=timeout) + + async def destroy(self, config_file: str) -> None: + """Destroy deployed MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if self.verbose: + self.console.print("[blue]Destroying deployment...[/blue]") + + async with dagger.connection(dagger.Config(workdir=str(Path.cwd()))): + if deployment_type == "kubernetes": + await self._destroy_kubernetes(config) + elif deployment_type == "compose": + await self._destroy_compose(config) + + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests. + + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests + + Returns: + Path to generated manifests directory + + Raises: + ValueError: If unsupported deployment type specified + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if output_dir is None: + deploy_dir = get_deploy_dir() + # Separate subdirectories for kubernetes and compose + manifests_path = deploy_dir / "manifests" / deployment_type + else: + manifests_path = Path(output_dir) + + manifests_path.mkdir(parents=True, exist_ok=True) + + # Store output dir for later use + self._last_output_dir = manifests_path + + # Generate plugin config.yaml for gateway (shared function) + generate_plugin_config(config, manifests_path, verbose=self.verbose) + + if deployment_type == "kubernetes": + generate_kubernetes_manifests(config, manifests_path, verbose=self.verbose) + elif deployment_type == "compose": + generate_compose_manifests(config, manifests_path, verbose=self.verbose) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + return manifests_path + + # Private helper methods + + async def _build_component_with_dagger(self, component: BuildableConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build a component (gateway or plugin) container using Dagger. + + Args: + component: Component configuration (GatewayConfig or PluginConfig) + component_name: Name of the component (gateway or plugin name) + no_cache: Disable cache + copy_env_templates: Copy .env.template from repo if it exists + + Raises: + ValueError: If component has no repo field + Exception: If build or export fails + """ + repo = component.repo + + if not repo: + raise ValueError(f"Component '{component_name}' has no 'repo' field") + + # Clone repository to local directory for env template access + git_ref = component.ref or "main" + clone_dir = Path(f"./build/{component_name}") + + # For Dagger, we still need local clone if copying env templates + if copy_env_templates: + # Standard + import subprocess + + clone_dir.mkdir(parents=True, exist_ok=True) + + if (clone_dir / ".git").exists(): + subprocess.run(["git", "fetch", "origin", git_ref], cwd=clone_dir, check=True, capture_output=True) + # Checkout what we just fetched (FETCH_HEAD) + subprocess.run(["git", "checkout", "FETCH_HEAD"], cwd=clone_dir, check=True, capture_output=True) + else: + subprocess.run(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)], check=True, capture_output=True) + + # Determine build context + build_context = component.context or "." + build_dir = clone_dir / build_context + + # Copy env template using shared function + copy_template(component_name, build_dir, verbose=self.verbose) + + # Use Dagger for the actual build + source = dag.git(repo).branch(git_ref).tree() + + # If component has context subdirectory, navigate to it + build_context = component.context or "." + if build_context != ".": + source = source.directory(build_context) + + # Detect Containerfile/Dockerfile + containerfile = component.containerfile or "Containerfile" + + # Build container - determine image tag + if component.image: + # Use explicitly specified image name + image_tag = component.image + else: + # Generate default image name based on component type + image_tag = f"mcpgateway-{component_name.lower()}:latest" + + # Build with optional target stage for multi-stage builds + build_kwargs = {"dockerfile": containerfile} + if component.target: + build_kwargs["target"] = component.target + + # Use docker_build on the directory + container = source.docker_build(**build_kwargs) + + # Export image to Docker daemon (always export, Dagger handles caching) + # Workaround for dagger-io 0.19.0 bug: export_image returns None instead of Void + # The export actually works, but beartype complains about the return type + try: + await container.export_image(image_tag) + except Exception as e: + # Ignore beartype validation error - the export actually succeeds + if "BeartypeCallHintReturnViolation" not in str(type(e)): + raise + + # Handle registry operations (tag and push if enabled) + # Note: Dagger exports to local docker/podman, so we need to detect which runtime to use + # Standard + import shutil + + container_runtime = "docker" if shutil.which("docker") else "podman" + image_tag = handle_registry_operations(component, component_name, image_tag, container_runtime, verbose=self.verbose) + + if self.verbose: + self.console.print(f"[green]✓ Built {component_name} -> {image_tag}[/green]") + + async def _deploy_kubernetes(self, manifests_dir: Path) -> None: + """Deploy to Kubernetes using kubectl. + + Uses shared deploy_kubernetes() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + """ + deploy_kubernetes(manifests_dir, verbose=self.verbose) + + async def _deploy_compose(self, manifests_dir: Path) -> None: + """Deploy using Docker Compose. + + Uses shared deploy_compose() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing compose manifest + """ + compose_file = manifests_dir / "docker-compose.yaml" + deploy_compose(compose_file, verbose=self.verbose) + + async def _verify_kubernetes(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: + """Verify Kubernetes deployment health. + + Uses shared verify_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + wait: Wait for pods to be ready + timeout: Wait timeout in seconds + """ + namespace = config.deployment.namespace or "mcp-gateway" + output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) + self.console.print(output) + + async def _verify_compose(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: + """Verify Docker Compose deployment health. + + Uses shared verify_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + wait: Wait for containers to be ready + timeout: Wait timeout in seconds + """ + _ = config, wait, timeout # Reserved for future use + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + output = verify_compose(compose_file, verbose=self.verbose) + self.console.print(output) + + async def _destroy_kubernetes(self, config: MCPStackConfig) -> None: + """Destroy Kubernetes deployment. + + Uses shared destroy_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + """ + _ = config # Reserved for future use (namespace, labels, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") + destroy_kubernetes(manifests_dir, verbose=self.verbose) + + async def _destroy_compose(self, config: MCPStackConfig) -> None: + """Destroy Docker Compose deployment. + + Uses shared destroy_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + """ + _ = config # Reserved for future use (project name, networks, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + destroy_compose(compose_file, verbose=self.verbose) diff --git a/mcpgateway/tools/builder/factory.py b/mcpgateway/tools/builder/factory.py new file mode 100644 index 000000000..8c58ac5b1 --- /dev/null +++ b/mcpgateway/tools/builder/factory.py @@ -0,0 +1,119 @@ +"""Location: ./mcpgateway/tools/builder/factory.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Factory for creating MCP Stack deployment implementations. + +This module provides a factory pattern for creating the appropriate deployment +implementation (Dagger or Plain Python) based on availability and user preference. + +The factory handles graceful fallback from Dagger to Python if dependencies are +unavailable, ensuring the deployment system works in various environments. + +Example: + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. + >>> # Validate configuration (output varies by config) + >>> # deployer.validate("mcp-stack.yaml") +""" + +# Standard +from enum import Enum + +# Third-Party +from rich.console import Console + +# First-Party +from mcpgateway.tools.builder.pipeline import CICDModule + + +class CICDTypes(str, Enum): + """Deployment implementation types. + + Attributes: + DAGGER: Dagger-based implementation (optimal performance) + PYTHON: Plain Python implementation (fallback, no dependencies) + """ + + DAGGER = "dagger" + PYTHON = "python" + + +console = Console() + + +class DeployFactory: + """Factory for creating MCP Stack deployment implementations. + + This factory implements the Strategy pattern, allowing dynamic selection + between Dagger and Python implementations based on availability. + """ + + @staticmethod + def create_deployer(deployer: str, verbose: bool = False) -> tuple[CICDModule, CICDTypes]: + """Create a deployment implementation instance. + + Attempts to load the requested deployer type with automatic fallback + to Python implementation if dependencies are missing. + + Args: + deployer: Deployment type to create ("dagger" or "python") + verbose: Enable verbose logging during creation + + Returns: + tuple: (deployer_instance, actual_type) + - deployer_instance: Instance of MCPStackDagger or MCPStackPython + - actual_type: CICDTypes enum indicating which implementation was loaded + + Raises: + RuntimeError: If no implementation can be loaded (critical failure) + + Example: + >>> # Try to load Dagger, fall back to Python if unavailable + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. + >>> if mode == CICDTypes.DAGGER: + ... print("Using optimized Dagger implementation") + ... else: + ... print("Using fallback Python implementation") + Using fallback Python implementation + """ + # Attempt to load Dagger implementation first if requested + if deployer == "dagger": + try: + # First-Party + from mcpgateway.tools.builder.dagger_deploy import DAGGER_AVAILABLE, MCPStackDagger + + # Check if dagger is actually available (not just the module) + if not DAGGER_AVAILABLE: + raise ImportError("Dagger SDK not installed") + + if verbose: + console.print("[green]✓ Dagger module loaded[/green]") + + return (MCPStackDagger(verbose), CICDTypes.DAGGER) + + except ImportError: + # Dagger dependencies not available, fall back to Python + console.print("[yellow]⚠ Dagger not installed. Using plain python.[/yellow]") + + # Load plain Python implementation (fallback or explicitly requested) + try: + # First-Party + from mcpgateway.tools.builder.python_deploy import MCPStackPython + + if verbose and deployer != "dagger": + console.print("[blue]Using plain Python implementation[/blue]") + + return (MCPStackPython(verbose), CICDTypes.PYTHON) + + except ImportError as e: + # Critical failure - neither implementation can be loaded + console.print("[red]✗ ERROR: Cannot import deployment modules[/red]") + console.print(f"[red] Details: {e}[/red]") + console.print("[yellow] Make sure you're running from the project root[/yellow]") + console.print("[yellow] and PYTHONPATH is set correctly[/yellow]") + + # This should never be reached if PYTHONPATH is set correctly + raise RuntimeError(f"Unable to load deployer of type '{deployer}'. ") diff --git a/mcpgateway/tools/builder/pipeline.py b/mcpgateway/tools/builder/pipeline.py new file mode 100644 index 000000000..db53384ad --- /dev/null +++ b/mcpgateway/tools/builder/pipeline.py @@ -0,0 +1,294 @@ +"""Location: ./mcpgateway/tools/builder/pipeline.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Abstract base class for MCP Stack deployment implementations. + +This module defines the CICDModule interface that all deployment implementations +must implement. It provides a common API for building, deploying, and managing +MCP Gateway stacks with external plugin servers. + +The base class implements shared functionality (validation) while requiring +subclasses to implement deployment-specific logic (build, deploy, etc.). + +Design Pattern: + Strategy Pattern - Different implementations (Dagger vs Python) can be + swapped transparently via the DeployFactory. + +Example: + >>> from mcpgateway.tools.builder.factory import DeployFactory + >>> deployer, mode = DeployFactory.create_deployer("dagger", verbose=False) + ⚠ Dagger not installed. Using plain python. + >>> # Validate configuration (output varies by config) + >>> # deployer.validate("mcp-stack.yaml") + >>> # Async methods must be called with await (see method examples below) +""" + +# Standard +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Optional + +# Third-Party +from pydantic import ValidationError +from rich.console import Console +import yaml + +# First-Party +from mcpgateway.tools.builder.schema import MCPStackConfig + +# Shared console instance for consistent output formatting +console = Console() + + +class CICDModule(ABC): + """Abstract base class for MCP Stack deployment implementations. + + This class defines the interface that all deployment implementations must + implement. It provides common initialization and validation logic while + deferring implementation-specific details to subclasses. + + Attributes: + verbose (bool): Enable verbose output during operations + console (Console): Rich console for formatted output + + Implementations: + - MCPStackDagger: High-performance implementation using Dagger SDK + - MCPStackPython: Fallback implementation using plain Python + Docker/Podman + + Example: + >>> class MyDeployer(CICDModule): + ... async def build(self, config_file: str, **kwargs) -> None: + ... # Implementation-specific build logic + ... pass + """ + + def __init__(self, verbose: bool = False): + """Initialize the deployment module. + + Args: + verbose: Enable verbose output during all operations + """ + self.verbose = verbose + self.console = console + + def validate(self, config_file: str) -> None: + """Validate mcp-stack.yaml configuration using Pydantic schemas. + + This method provides comprehensive validation of the MCP stack configuration + using Pydantic models defined in schema.py. It validates: + - Required sections (deployment, gateway, plugins) + - Deployment type (kubernetes or compose) + - Gateway image specification + - Plugin configurations (name, repo/image, etc.) + - Custom business rules (unique names, valid combinations) + + Args: + config_file: Path to mcp-stack.yaml configuration file + + Raises: + ValueError: If configuration is invalid, with formatted error details + ValidationError: If Pydantic schema validation fails + FileNotFoundError: If config_file does not exist + + Example: + # deployer.validate("mcp-stack-local.yaml") + # ✓ Configuration valid + + # deployer.validate("invalid.yaml") + # ValueError: Configuration validation failed: + # • plugins -> 0 -> name: Field required + # • gateway -> image: Field required + """ + if self.verbose: + self.console.print(f"[blue]Validating {config_file}...[/blue]") + + # Load YAML configuration + with open(config_file, "r") as f: + config_dict = yaml.safe_load(f) + + # Validate using Pydantic schema + try: + # Local + + MCPStackConfig(**config_dict) + except ValidationError as e: + # Format validation errors for better readability + error_msg = "Configuration validation failed:\n" + for error in e.errors(): + # Join the error location path (e.g., plugins -> 0 -> name) + loc = " -> ".join(str(x) for x in error["loc"]) + error_msg += f" • {loc}: {error['msg']}\n" + raise ValueError(error_msg) from e + + if self.verbose: + self.console.print("[green]✓ Configuration valid[/green]") + + @abstractmethod + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[list[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build container images for plugins and/or gateway. + + Subclasses must implement this to build Docker/Podman images from + Git repositories or use pre-built images. + + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build (optional) + no_cache: Disable build cache for fresh builds + copy_env_templates: Copy .env.template files from cloned repos + + Raises: + RuntimeError: If build fails + ValueError: If plugin configuration is invalid + + Example: + # await deployer.build("mcp-stack.yaml", plugins_only=True) + # ✓ Built OPAPluginFilter + # ✓ Built LLMGuardPlugin + """ + pass + + @abstractmethod + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for gateway and plugins. + + Creates a certificate authority (CA) and issues certificates for: + - Gateway (client certificates for connecting to plugins) + - Each plugin (server certificates for accepting connections) + + Certificates are stored in the paths defined in the config's + certificates section (default: ./certs/mcp/). + + Args: + config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If certificate generation fails + FileNotFoundError: If required tools (openssl) are not available + + Example: + # await deployer.generate_certificates("mcp-stack.yaml") + # ✓ Certificates generated + """ + pass + + @abstractmethod + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False) -> None: + """Deploy the MCP stack to Kubernetes or Docker Compose. + + This is the main deployment method that orchestrates: + 1. Building containers (unless skip_build=True) + 2. Generating mTLS certificates (unless skip_certs=True or mTLS disabled) + 3. Generating manifests (Kubernetes YAML or docker-compose.yaml) + 4. Applying the deployment (unless dry_run=True) + + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without actually deploying + skip_build: Skip building containers (use existing images) + skip_certs: Skip certificate generation (use existing certs) + + Raises: + RuntimeError: If deployment fails at any stage + ValueError: If configuration is invalid + + Example: + # Full deployment + # await deployer.deploy("mcp-stack.yaml") + # ✓ Build complete + # ✓ Certificates generated + # ✓ Deployment complete + + # Dry run (generate manifests only) + # await deployer.deploy("mcp-stack.yaml", dry_run=True) + # ✓ Dry-run complete (no changes made) + """ + pass + + @abstractmethod + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health and readiness. + + Checks that all deployed services are healthy and ready: + - Kubernetes: Checks pod status, optionally waits for Ready + - Docker Compose: Checks container status + + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to become ready + timeout: Maximum time to wait in seconds (default: 300) + + Raises: + RuntimeError: If verification fails or timeout is reached + TimeoutError: If wait=True and deployment doesn't become ready + + Example: + # Quick health check + # await deployer.verify("mcp-stack.yaml") + # NAME READY STATUS RESTARTS AGE + # mcpgateway-xxx 1/1 Running 0 2m + # mcp-plugin-opa-xxx 1/1 Running 0 2m + + # Wait for ready state + # await deployer.verify("mcp-stack.yaml", wait=True, timeout=600) + # ✓ Deployment healthy + """ + pass + + @abstractmethod + async def destroy(self, config_file: str) -> None: + """Destroy the deployed MCP stack. + + Removes all deployed resources: + - Kubernetes: Deletes all resources in the namespace + - Docker Compose: Stops and removes containers, networks, volumes + + WARNING: This is destructive and cannot be undone! + + Args: + config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If destruction fails + + Example: + # await deployer.destroy("mcp-stack.yaml") + # ✓ Deployment destroyed + """ + pass + + @abstractmethod + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests (Kubernetes YAML or docker-compose.yaml). + + Creates deployment manifests based on configuration: + - Kubernetes: Generates Deployment, Service, ConfigMap, Secret YAML files + - Docker Compose: Generates docker-compose.yaml with all services + + Also generates: + - plugins-config.yaml: Plugin manager configuration for gateway + - Environment files: .env files for each service + + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests (default: ./deploy/manifests) + + Returns: + Path: Directory containing generated manifests + + Raises: + ValueError: If configuration is invalid + OSError: If output directory cannot be created + + Example: + # manifests_path = deployer.generate_manifests("mcp-stack.yaml") + # print(f"Manifests generated in: {manifests_path}") + # Manifests generated in: /path/to/deploy/manifests + + # Custom output directory + # deployer.generate_manifests("mcp-stack.yaml", output_dir="./my-manifests") + # ✓ Manifests generated: ./my-manifests + """ + pass diff --git a/mcpgateway/tools/builder/python_deploy.py b/mcpgateway/tools/builder/python_deploy.py new file mode 100644 index 000000000..3af5eb17e --- /dev/null +++ b/mcpgateway/tools/builder/python_deploy.py @@ -0,0 +1,527 @@ +"""Location: ./mcpgateway/tools/builder/python_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Plain Python MCP Stack Deployment Module + +This module provides deployment functionality using only standard Python +and system commands (docker/podman, kubectl, docker-compose). + +This is the fallback implementation when Dagger is not available. +""" + +# Standard +from pathlib import Path +import shutil +import subprocess +from typing import List, Optional + +# Third-Party +from rich.console import Console +from rich.progress import Progress, SpinnerColumn, TextColumn + +# First-Party +from mcpgateway.tools.builder.common import ( + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + handle_registry_operations, + load_config, + verify_compose, + verify_kubernetes, +) +from mcpgateway.tools.builder.common import copy_env_template as copy_template +from mcpgateway.tools.builder.pipeline import CICDModule +from mcpgateway.tools.builder.schema import BuildableConfig, MCPStackConfig + +console = Console() + + +class MCPStackPython(CICDModule): + """Plain Python implementation of MCP Stack deployment.""" + + async def build(self, config_file: str, plugins_only: bool = False, specific_plugins: Optional[List[str]] = None, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build gateway and plugin containers using docker/podman. + + Args: + config_file: Path to mcp-stack.yaml + plugins_only: Only build plugins, skip gateway + specific_plugins: List of specific plugin names to build + no_cache: Disable build cache + copy_env_templates: Copy .env.template files from cloned repos + + Raises: + Exception: If build fails for any component + """ + config = load_config(config_file) + + # Build gateway (unless plugins_only=True) + if not plugins_only: + gateway = config.gateway + if gateway.repo: + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + task = progress.add_task("Building gateway...", total=None) + try: + self._build_component(gateway, config, "gateway", no_cache=no_cache) + progress.update(task, completed=1, description="[green]✓ Built gateway[/green]") + except Exception as e: + progress.update(task, completed=1, description="[red]✗ Failed gateway[/red]") + # Print full error after progress bar closes + self.console.print("\n[red bold]Gateway build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + elif self.verbose: + self.console.print("[dim]Skipping gateway build (using pre-built image)[/dim]") + + # Build plugins + plugins = config.plugins + + if specific_plugins: + plugins = [p for p in plugins if p.name in specific_plugins] + + if not plugins: + self.console.print("[yellow]No plugins to build[/yellow]") + return + + with Progress(SpinnerColumn(), TextColumn("[progress.description]{task.description}"), console=self.console) as progress: + + for plugin in plugins: + plugin_name = plugin.name + + # Skip if pre-built image specified + if plugin.image and not plugin.repo: + task = progress.add_task(f"Skipping {plugin_name} (using pre-built image)", total=1) + progress.update(task, completed=1) + continue + + task = progress.add_task(f"Building {plugin_name}...", total=None) + + try: + self._build_component(plugin, config, plugin_name, no_cache=no_cache, copy_env_templates=copy_env_templates) + progress.update(task, completed=1, description=f"[green]✓ Built {plugin_name}[/green]") + except Exception as e: + progress.update(task, completed=1, description=f"[red]✗ Failed {plugin_name}[/red]") + # Print full error after progress bar closes + self.console.print(f"\n[red bold]Plugin '{plugin_name}' build failed:[/red bold]") + self.console.print(f"[red]{type(e).__name__}: {str(e)}[/red]") + if self.verbose: + # Standard + import traceback + + self.console.print(f"[dim]{traceback.format_exc()}[/dim]") + raise + + async def generate_certificates(self, config_file: str) -> None: + """Generate mTLS certificates for plugins. + + Supports two modes: + 1. Local generation (use_cert_manager=false): Uses Makefile to generate certificates locally + 2. cert-manager (use_cert_manager=true): Skips local generation, cert-manager will create certificates + + Args: + config_file: Path to mcp-stack.yaml + + Raises: + RuntimeError: If make command not found (when using local generation) + """ + config = load_config(config_file) + + # Check if using cert-manager + cert_config = config.certificates + use_cert_manager = cert_config.use_cert_manager if cert_config else False + validity_days = cert_config.validity_days if cert_config else 825 + + if use_cert_manager: + # Skip local generation - cert-manager will handle certificate creation + if self.verbose: + self.console.print("[blue]Using cert-manager for certificate management[/blue]") + self.console.print("[dim]Skipping local certificate generation (cert-manager will create certificates)[/dim]") + return + + # Local certificate generation (backward compatibility) + if self.verbose: + self.console.print("[blue]Generating mTLS certificates locally...[/blue]") + + # Check if make is available + if not shutil.which("make"): + raise RuntimeError("'make' command not found. Cannot generate certificates.") + + # Generate CA + self._run_command(["make", "certs-mcp-ca", f"MCP_CERT_DAYS={validity_days}"]) + + # Generate gateway cert + self._run_command(["make", "certs-mcp-gateway", f"MCP_CERT_DAYS={validity_days}"]) + + # Generate plugin certificates + plugins = config.plugins + for plugin in plugins: + plugin_name = plugin.name + self._run_command(["make", "certs-mcp-plugin", f"PLUGIN_NAME={plugin_name}", f"MCP_CERT_DAYS={validity_days}"]) + + if self.verbose: + self.console.print("[green]✓ Certificates generated locally[/green]") + + async def deploy(self, config_file: str, dry_run: bool = False, skip_build: bool = False, skip_certs: bool = False, output_dir: Optional[str] = None) -> None: + """Deploy MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + dry_run: Generate manifests without deploying + skip_build: Skip building containers + skip_certs: Skip certificate generation + output_dir: Output directory for manifests (default: ./deploy) + + Raises: + ValueError: If unsupported deployment type specified + """ + config = load_config(config_file) + + # Build containers + if not skip_build: + await self.build(config_file) + + # Generate certificates (only if mTLS is enabled) + gateway_mtls = config.gateway.mtls_enabled if config.gateway.mtls_enabled is not None else True + plugin_mtls = any((p.mtls_enabled if p.mtls_enabled is not None else True) for p in config.plugins) + mtls_needed = gateway_mtls or plugin_mtls + + if not skip_certs and mtls_needed: + await self.generate_certificates(config_file) + elif not skip_certs and not mtls_needed: + if self.verbose: + self.console.print("[dim]Skipping certificate generation (mTLS disabled)[/dim]") + + # Generate manifests + manifests_dir = self.generate_manifests(config_file, output_dir=output_dir) + + if dry_run: + self.console.print(f"[yellow]Dry-run: Manifests generated in {manifests_dir}[/yellow]") + return + + # Apply deployment + deployment_type = config.deployment.type + + if deployment_type == "kubernetes": + self._deploy_kubernetes(manifests_dir) + elif deployment_type == "compose": + self._deploy_compose(manifests_dir) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + async def verify(self, config_file: str, wait: bool = False, timeout: int = 300) -> None: + """Verify deployment health. + + Args: + config_file: Path to mcp-stack.yaml + wait: Wait for deployment to be ready + timeout: Wait timeout in seconds + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if self.verbose: + self.console.print("[blue]Verifying deployment...[/blue]") + + if deployment_type == "kubernetes": + self._verify_kubernetes(config, wait=wait, timeout=timeout) + elif deployment_type == "compose": + self._verify_compose(config, wait=wait, timeout=timeout) + + async def destroy(self, config_file: str) -> None: + """Destroy deployed MCP stack. + + Args: + config_file: Path to mcp-stack.yaml + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if self.verbose: + self.console.print("[blue]Destroying deployment...[/blue]") + + if deployment_type == "kubernetes": + self._destroy_kubernetes(config) + elif deployment_type == "compose": + self._destroy_compose(config) + + def generate_manifests(self, config_file: str, output_dir: Optional[str] = None) -> Path: + """Generate deployment manifests. + + Args: + config_file: Path to mcp-stack.yaml + output_dir: Output directory for manifests + + Returns: + Path to generated manifests directory + + Raises: + ValueError: If unsupported deployment type specified + """ + config = load_config(config_file) + deployment_type = config.deployment.type + + if output_dir is None: + deploy_dir = get_deploy_dir() + # Separate subdirectories for kubernetes and compose + output_dir = deploy_dir / "manifests" / deployment_type + else: + output_dir = Path(output_dir) + + output_dir.mkdir(parents=True, exist_ok=True) + + # Store output dir for later use + self._last_output_dir = output_dir + + # Generate plugin config.yaml for gateway (shared function) + generate_plugin_config(config, output_dir, verbose=self.verbose) + + if deployment_type == "kubernetes": + generate_kubernetes_manifests(config, output_dir, verbose=self.verbose) + elif deployment_type == "compose": + generate_compose_manifests(config, output_dir, verbose=self.verbose) + else: + raise ValueError(f"Unsupported deployment type: {deployment_type}") + + return output_dir + + # Private helper methods + + def _detect_container_engine(self, config: MCPStackConfig) -> str: + """Detect available container engine (docker or podman). + + Supports both engine names ("docker", "podman") and full paths ("/opt/podman/bin/podman"). + + Returns: + Name or full path to available engine + + Raises: + RuntimeError: If no container engine found + """ + if config.deployment.container_engine: + engine = config.deployment.container_engine + + # Check if it's a full path + if "/" in engine: + if Path(engine).exists() and Path(engine).is_file(): + return engine + else: + raise RuntimeError(f"Specified container engine path does not exist: {engine}") + + # Otherwise treat as command name and check PATH + if shutil.which(engine): + return engine + else: + raise RuntimeError(f"Unable to find specified container engine: {engine}") + + # Auto-detect + if shutil.which("docker"): + return "docker" + elif shutil.which("podman"): + return "podman" + else: + raise RuntimeError("No container engine found. Install docker or podman.") + + def _run_command(self, cmd: List[str], cwd: Optional[Path] = None, capture_output: bool = False) -> subprocess.CompletedProcess: + """Run a shell command. + + Args: + cmd: Command and arguments + cwd: Working directory + capture_output: Capture stdout/stderr + + Returns: + CompletedProcess instance + + Raises: + subprocess.CalledProcessError: If command fails + """ + if self.verbose: + self.console.print(f"[dim]Running: {' '.join(cmd)}[/dim]") + + result = subprocess.run(cmd, cwd=cwd, capture_output=capture_output, text=True, check=True) + + return result + + def _build_component(self, component: BuildableConfig, config: MCPStackConfig, component_name: str, no_cache: bool = False, copy_env_templates: bool = False) -> None: + """Build a component (gateway or plugin) container using docker/podman. + + Args: + component: Component configuration (GatewayConfig or PluginConfig) + config: Overall stack configuration + component_name: Name of the component (gateway or plugin name) + no_cache: Disable cache + copy_env_templates: Copy .env.template from repo if it exists + + Raises: + ValueError: If component has no repo field + FileNotFoundError: If build context or containerfile not found + """ + repo = component.repo + + container_engine = self._detect_container_engine(config) + + if not repo: + raise ValueError(f"Component '{component_name}' has no 'repo' field") + + # Clone repository + git_ref = component.ref or "main" + clone_dir = Path(f"./build/{component_name}") + clone_dir.mkdir(parents=True, exist_ok=True) + + # Clone or update repo + if (clone_dir / ".git").exists(): + if self.verbose: + self.console.print(f"[dim]Updating {component_name} repository...[/dim]") + self._run_command(["git", "fetch", "origin", git_ref], cwd=clone_dir) + # Checkout what we just fetched (FETCH_HEAD) + self._run_command(["git", "checkout", "FETCH_HEAD"], cwd=clone_dir) + else: + if self.verbose: + self.console.print(f"[dim]Cloning {component_name} repository...[/dim]") + self._run_command(["git", "clone", "--branch", git_ref, "--depth", "1", repo, str(clone_dir)]) + + # Determine build context (subdirectory within repo) + build_context = component.context or "." + build_dir = clone_dir / build_context + + if not build_dir.exists(): + raise FileNotFoundError(f"Build context not found: {build_dir}") + + # Detect Containerfile/Dockerfile + containerfile = component.containerfile or "Containerfile" + containerfile_path = build_dir / containerfile + + if not containerfile_path.exists(): + containerfile = "Dockerfile" + containerfile_path = build_dir / containerfile + if not containerfile_path.exists(): + raise FileNotFoundError(f"No Containerfile or Dockerfile found in {build_dir}") + + # Build container - determine image tag + if component.image: + # Use explicitly specified image name + image_tag = component.image + else: + # Generate default image name based on component type + image_tag = f"mcpgateway-{component_name.lower()}:latest" + + build_cmd = [container_engine, "build", "-f", containerfile, "-t", image_tag] + + if no_cache: + build_cmd.append("--no-cache") + + # Add target stage if specified (for multi-stage builds) + if component.target: + build_cmd.extend(["--target", component.target]) + + # For Docker, add --load to ensure image is loaded into daemon + # (needed for buildx/docker-container driver) + if container_engine == "docker": + build_cmd.append("--load") + + build_cmd.append(".") + + self._run_command(build_cmd, cwd=build_dir) + + # Handle registry operations (tag and push if enabled) + image_tag = handle_registry_operations(component, component_name, image_tag, container_engine, verbose=self.verbose) + + # Copy .env.template if requested and exists + if copy_env_templates: + copy_template(component_name, build_dir, verbose=self.verbose) + + if self.verbose: + self.console.print(f"[green]✓ Built {component_name} -> {image_tag}[/green]") + + def _deploy_kubernetes(self, manifests_dir: Path) -> None: + """Deploy to Kubernetes using kubectl. + + Uses shared deploy_kubernetes() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing Kubernetes manifests + """ + deploy_kubernetes(manifests_dir, verbose=self.verbose) + + def _deploy_compose(self, manifests_dir: Path) -> None: + """Deploy using Docker Compose. + + Uses shared deploy_compose() from common.py to avoid code duplication. + + Args: + manifests_dir: Path to directory containing compose manifest + """ + compose_file = manifests_dir / "docker-compose.yaml" + deploy_compose(compose_file, verbose=self.verbose) + + def _verify_kubernetes(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: + """Verify Kubernetes deployment health. + + Uses shared verify_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + wait: Wait for pods to be ready + timeout: Wait timeout in seconds + """ + namespace = config.deployment.namespace or "mcp-gateway" + output = verify_kubernetes(namespace, wait=wait, timeout=timeout, verbose=self.verbose) + self.console.print(output) + + def _verify_compose(self, config: MCPStackConfig, wait: bool = False, timeout: int = 300) -> None: + """Verify Docker Compose deployment health. + + Uses shared verify_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + wait: Wait for containers to be ready + timeout: Wait timeout in seconds + """ + _ = config, wait, timeout # Reserved for future use + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + output = verify_compose(compose_file, verbose=self.verbose) + self.console.print(output) + + def _destroy_kubernetes(self, config: MCPStackConfig) -> None: + """Destroy Kubernetes deployment. + + Uses shared destroy_kubernetes() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + """ + _ = config # Reserved for future use (namespace, labels, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + manifests_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "kubernetes") + destroy_kubernetes(manifests_dir, verbose=self.verbose) + + def _destroy_compose(self, config: MCPStackConfig) -> None: + """Destroy Docker Compose deployment. + + Uses shared destroy_compose() from common.py to avoid code duplication. + + Args: + config: Parsed configuration Pydantic model + """ + _ = config # Reserved for future use (project name, networks, etc.) + # Use the same manifests directory as generate_manifests + deploy_dir = get_deploy_dir() + output_dir = getattr(self, "_last_output_dir", deploy_dir / "manifests" / "compose") + compose_file = output_dir / "docker-compose.yaml" + destroy_compose(compose_file, verbose=self.verbose) diff --git a/mcpgateway/tools/builder/schema.py b/mcpgateway/tools/builder/schema.py new file mode 100644 index 000000000..3f3220049 --- /dev/null +++ b/mcpgateway/tools/builder/schema.py @@ -0,0 +1,264 @@ +"""Location: ./mcpgateway/tools/builder/schema.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Pydantic schemas for MCP Stack configuration validation""" + +# Standard +from typing import Any, Dict, List, Literal, Optional + +# Third-Party +from pydantic import BaseModel, ConfigDict, Field, field_validator + + +class OpenShiftConfig(BaseModel): + """OpenShift-specific configuration. + + Routes are OpenShift's native way of exposing services externally (predates Kubernetes Ingress). + They provide built-in TLS termination and are integrated with OpenShift's router/HAProxy infrastructure. + + Attributes: + create_routes: Create OpenShift Route resources for external access (default: False) + domain: OpenShift apps domain for route hostnames (default: auto-detected from cluster) + tls_termination: TLS termination mode - edge, passthrough, or reencrypt (default: edge) + """ + + create_routes: bool = Field(False, description="Create OpenShift Route resources") + domain: Optional[str] = Field(None, description="OpenShift apps domain (e.g., apps-crc.testing)") + tls_termination: Literal["edge", "passthrough", "reencrypt"] = Field("edge", description="TLS termination mode") + + +class DeploymentConfig(BaseModel): + """Deployment configuration""" + + type: Literal["kubernetes", "compose"] = Field(..., description="Deployment type") + container_engine: Optional[str] = Field(default=None, description="Container engine: 'podman', 'docker', or full path (e.g., '/opt/podman/bin/podman')") + project_name: Optional[str] = Field(None, description="Project name for compose") + namespace: Optional[str] = Field(None, description="Namespace for Kubernetes") + openshift: Optional[OpenShiftConfig] = Field(None, description="OpenShift-specific configuration") + + +class RegistryConfig(BaseModel): + """Container registry configuration. + + Optional configuration for pushing built images to a container registry. + When enabled, images will be tagged with the full registry path and optionally pushed. + + Authentication: + Users must authenticate to the registry before running the build: + - Docker Hub: `docker login` + - Quay.io: `podman login quay.io` + - OpenShift internal: `podman login $(oc registry info) -u $(oc whoami) -p $(oc whoami -t)` + - Private registry: `podman login your-registry.com -u username` + + Attributes: + enabled: Enable registry integration (default: False) + url: Registry URL (e.g., "docker.io", "quay.io", "default-route-openshift-image-registry.apps-crc.testing") + namespace: Registry namespace/organization/project (e.g., "myorg", "mcp-gateway-test") + push: Push image after build (default: True) + image_pull_policy: Kubernetes imagePullPolicy (default: "IfNotPresent") + """ + + enabled: bool = Field(False, description="Enable registry push") + url: Optional[str] = Field(None, description="Registry URL (e.g., docker.io, quay.io, or internal registry)") + namespace: Optional[str] = Field(None, description="Registry namespace/organization/project") + push: bool = Field(True, description="Push image after build") + image_pull_policy: Optional[str] = Field("IfNotPresent", description="Kubernetes imagePullPolicy (IfNotPresent, Always, Never)") + + +class BuildableConfig(BaseModel): + """Base class for components that can be built from source or use pre-built images. + + This base class provides common configuration for both gateway and plugins, + supporting two build modes: + 1. Pre-built image: Specify only 'image' field + 2. Build from source: Specify 'repo' and optionally 'ref', 'context', 'containerfile', 'target' + + Attributes: + image: Pre-built Docker image name (e.g., "mcpgateway/mcpgateway:latest") + repo: Git repository URL to build from + ref: Git branch/tag/commit to checkout (default: "main") + context: Build context subdirectory within repo (default: ".") + containerfile: Path to Containerfile/Dockerfile (default: "Containerfile") + target: Target stage for multi-stage builds (optional) + host_port: Host port mapping for direct access (optional) + env_vars: Environment variables for container + env_file: Path to environment file (.env) + mtls_enabled: Enable mutual TLS authentication (default: True) + """ + + # Allow attribute assignment after model creation (needed for auto-detection of env_file) + model_config = ConfigDict(validate_assignment=True) + + # Build configuration + image: Optional[str] = Field(None, description="Pre-built Docker image") + repo: Optional[str] = Field(None, description="Git repository URL") + ref: Optional[str] = Field("main", description="Git branch/tag/commit") + context: Optional[str] = Field(".", description="Build context subdirectory") + containerfile: Optional[str] = Field("Containerfile", description="Containerfile path") + target: Optional[str] = Field(None, description="Multi-stage build target") + + # Runtime configuration + host_port: Optional[int] = Field(None, description="Host port mapping") + env_vars: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Environment variables") + env_file: Optional[str] = Field(None, description="Path to environment file (.env)") + mtls_enabled: Optional[bool] = Field(True, description="Enable mTLS") + + # Registry configuration + registry: Optional[RegistryConfig] = Field(None, description="Container registry configuration") + + def model_post_init(self, __context: Any) -> None: + """Validate that either image or repo is specified + + Raises: + ValueError: If neither image nor repo is specified + """ + if not self.image and not self.repo: + component_type = self.__class__.__name__.replace("Config", "") + raise ValueError(f"{component_type} must specify either 'image' or 'repo'") + + +class GatewayConfig(BuildableConfig): + """Gateway configuration. + + Extends BuildableConfig to support either pre-built gateway images or + building the gateway from source repository. + + Attributes: + port: Gateway internal port (default: 4444) + """ + + port: Optional[int] = Field(4444, description="Gateway port") + + +class PluginConfig(BuildableConfig): + """Plugin configuration. + + Extends BuildableConfig to support plugin-specific configuration while + inheriting common build and runtime capabilities. + + Attributes: + name: Unique plugin identifier + port: Plugin internal port (default: 8000) + expose_port: Whether to expose plugin port on host (default: False) + plugin_overrides: Plugin-specific override configuration + """ + + name: str = Field(..., description="Plugin name") + port: Optional[int] = Field(8000, description="Plugin port") + expose_port: Optional[bool] = Field(False, description="Expose port on host") + plugin_overrides: Optional[Dict[str, Any]] = Field(default_factory=dict, description="Plugin overrides") + + @field_validator("name") + @classmethod + def validate_name(cls, v: str) -> str: + """Validate plugin name is non-empty + + Args: + v: Plugin name value to validate + + Returns: + Validated plugin name + + Raises: + ValueError: If plugin name is empty or whitespace only + """ + if not v or not v.strip(): + raise ValueError("Plugin name cannot be empty") + return v + + +class CertificatesConfig(BaseModel): + """Certificate configuration. + + Supports two modes: + 1. Local certificate generation (use_cert_manager=false, default): + - Certificates generated locally using OpenSSL (via Makefile) + - Deployed to Kubernetes as secrets via kubectl + - Manual rotation required before expiry + + 2. cert-manager integration (use_cert_manager=true, Kubernetes only): + - Certificates managed by cert-manager controller + - Automatic renewal before expiry (default: at 2/3 of lifetime) + - Native Kubernetes Certificate resources + - Requires cert-manager to be installed in cluster + + Attributes: + validity_days: Certificate validity period in days (default: 825 ≈ 2.25 years) + auto_generate: Auto-generate certificates locally (default: True) + use_cert_manager: Use cert-manager for certificate management (default: False, Kubernetes only) + cert_manager_issuer: Name of cert-manager Issuer/ClusterIssuer (default: "mcp-ca-issuer") + cert_manager_kind: Type of issuer - Issuer or ClusterIssuer (default: "Issuer") + ca_path: Path to CA certificates for local generation (default: "./certs/mcp/ca") + gateway_path: Path to gateway certificates for local generation (default: "./certs/mcp/gateway") + plugins_path: Path to plugin certificates for local generation (default: "./certs/mcp/plugins") + """ + + validity_days: Optional[int] = Field(825, description="Certificate validity in days") + auto_generate: Optional[bool] = Field(True, description="Auto-generate certificates locally") + + # cert-manager integration (Kubernetes only) + use_cert_manager: Optional[bool] = Field(False, description="Use cert-manager for certificate management (Kubernetes only)") + cert_manager_issuer: Optional[str] = Field("mcp-ca-issuer", description="cert-manager Issuer/ClusterIssuer name") + cert_manager_kind: Optional[Literal["Issuer", "ClusterIssuer"]] = Field("Issuer", description="cert-manager issuer kind") + + ca_path: Optional[str] = Field("./certs/mcp/ca", description="CA certificate path") + gateway_path: Optional[str] = Field("./certs/mcp/gateway", description="Gateway cert path") + plugins_path: Optional[str] = Field("./certs/mcp/plugins", description="Plugins cert path") + + +class PostgresConfig(BaseModel): + """PostgreSQL database configuration""" + + enabled: Optional[bool] = Field(True, description="Enable PostgreSQL deployment") + image: Optional[str] = Field("quay.io/sclorg/postgresql-15-c9s:latest", description="PostgreSQL image (default is OpenShift-compatible)") + database: Optional[str] = Field("mcp", description="Database name") + user: Optional[str] = Field("postgres", description="Database user") + password: Optional[str] = Field("mysecretpassword", description="Database password") + storage_size: Optional[str] = Field("10Gi", description="Persistent volume size (Kubernetes only)") + storage_class: Optional[str] = Field(None, description="Storage class name (Kubernetes only)") + + +class RedisConfig(BaseModel): + """Redis cache configuration""" + + enabled: Optional[bool] = Field(True, description="Enable Redis deployment") + image: Optional[str] = Field("redis:latest", description="Redis image") + + +class InfrastructureConfig(BaseModel): + """Infrastructure services configuration""" + + postgres: Optional[PostgresConfig] = Field(default_factory=PostgresConfig) + redis: Optional[RedisConfig] = Field(default_factory=RedisConfig) + + +class MCPStackConfig(BaseModel): + """Complete MCP Stack configuration""" + + deployment: DeploymentConfig + gateway: GatewayConfig + plugins: List[PluginConfig] = Field(default_factory=list) + certificates: Optional[CertificatesConfig] = Field(default_factory=CertificatesConfig) + infrastructure: Optional[InfrastructureConfig] = Field(default_factory=InfrastructureConfig) + + @field_validator("plugins") + @classmethod + def validate_plugin_names_unique(cls, v: List[PluginConfig]) -> List[PluginConfig]: + """Ensure plugin names are unique + + Args: + v: List of plugin configurations to validate + + Returns: + Validated list of plugin configurations + + Raises: + ValueError: If duplicate plugin names are found + """ + names = [p.name for p in v] + if len(names) != len(set(names)): + duplicates = [name for name in names if names.count(name) > 1] + raise ValueError(f"Duplicate plugin names found: {duplicates}") + return v diff --git a/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 new file mode 100644 index 000000000..aaf2fc04e --- /dev/null +++ b/mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 @@ -0,0 +1,198 @@ +# Location: ./mcpgateway/tools/builder/templates/compose/docker-compose.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Docker Compose manifest for MCP Stack +# Generated from mcp-stack.yaml + +version: '3.8' + +networks: + mcp-network: + driver: bridge + +volumes: + gateway-data: + driver: local + pgdata: + driver: local +{% for plugin in plugins %} + {{ plugin.name | lower }}-data: + driver: local +{% endfor %} + +services: + # MCP Gateway + mcpgateway: + image: {{ gateway.image }} + container_name: mcpgateway + hostname: mcpgateway + + {% if gateway.env_file is defined %} + env_file: + - {{ gateway.env_file }} + {% endif %} + + environment: + {% if gateway.env_vars is defined and gateway.env_vars %} + # User-defined environment variables + {% for key, value in gateway.env_vars.items() %} + - {{ key }}={{ value }} + {% endfor %} + {% endif %} + # Database configuration + - DATABASE_URL=postgresql://postgres:$${POSTGRES_PASSWORD:-mysecretpassword}@postgres:5432/mcp + - REDIS_URL=redis://redis:6379/0 + {% if gateway.mtls_enabled | default(true) %} + # mTLS client configuration (gateway connects to external plugins) + - PLUGINS_CLIENT_MTLS_CA_BUNDLE=/app/certs/mcp/ca/ca.crt + - PLUGINS_CLIENT_MTLS_CERTFILE=/app/certs/mcp/gateway/client.crt + - PLUGINS_CLIENT_MTLS_KEYFILE=/app/certs/mcp/gateway/client.key + - PLUGINS_CLIENT_MTLS_VERIFY={{ gateway.mtls_verify | default('true') }} + - PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME={{ gateway.mtls_check_hostname | default('false') }} + {% endif %} + + ports: + - "{{ gateway.host_port | default(4444) }}:{{ gateway.port | default(4444) }}" + + volumes: + - gateway-data:/app/data + {% if gateway.mtls_enabled | default(true) %} + - {{ cert_paths.gateway_cert_dir }}:/app/certs/mcp/gateway:ro + - {{ cert_paths.ca_cert_file }}:/app/certs/mcp/ca/ca.crt:ro + {% endif %} + # Auto-generated plugin configuration + - ./plugins-config.yaml:/app/config/plugins.yaml:ro + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ gateway.port | default(4444) }}/health').read()"] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_started +{% for plugin in plugins %} {{ plugin.name | lower }}: + condition: service_started +{% endfor %} + +{% for plugin in plugins %} + # Plugin: {{ plugin.name }} + {{ plugin.name | lower }}: + image: {{ plugin.image | default('mcpgateway-' + plugin.name | lower + ':latest') }} + container_name: mcp-plugin-{{ plugin.name | lower }} + hostname: {{ plugin.name | lower }} + + {% if plugin.env_file is defined %} + env_file: + - {{ plugin.env_file }} + {% endif %} + + environment: + {% if plugin.env_vars is defined and plugin.env_vars %} + # User-defined environment variables + {% for key, value in plugin.env_vars.items() %} + - {{ key }}={{ value }} + {% endfor %} + {% endif %} + {% if plugin.mtls_enabled | default(true) %} + # mTLS server configuration (plugin accepts gateway connections) + - PLUGINS_TRANSPORT=http + - PLUGINS_SERVER_HOST=0.0.0.0 + - PLUGINS_SERVER_PORT={{ plugin.port | default(8000) }} + - PLUGINS_SERVER_SSL_ENABLED=true + - PLUGINS_SERVER_SSL_KEYFILE=/app/certs/mcp/server.key + - PLUGINS_SERVER_SSL_CERTFILE=/app/certs/mcp/server.crt + - PLUGINS_SERVER_SSL_CA_CERTS=/app/certs/mcp/ca.crt + - PLUGINS_SERVER_SSL_CERT_REQS=2 # CERT_REQUIRED - enforce client certificates + {% endif %} + + {% if plugin.expose_port | default(false) %} + ports: + - "{{ plugin.host_port }}:{{ plugin.port | default(8000) }}" + {% endif %} + + volumes: + - {{ plugin.name | lower }}-data:/app/data + {% if plugin.mtls_enabled | default(true) %} + - {{ cert_paths.plugins_cert_base }}/{{ plugin.name }}:/app/certs/mcp:ro + {% endif %} + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + {% if plugin.mtls_enabled | default(true) %} + # When mTLS is enabled, health check uses separate HTTP server on port+1000 + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ (plugin.port | default(8000)) + 1000 }}/health').read()"] + {% else %} + # When mTLS is disabled, health check uses main server + test: ["CMD", "python3", "-c", "import urllib.request; urllib.request.urlopen('http://localhost:{{ plugin.port | default(8000) }}/health').read()"] + {% endif %} + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + {% if plugin.depends_on is defined %} + depends_on: + {% for dep in plugin.depends_on %} + - {{ dep }} + {% endfor %} + {% endif %} + +{% endfor %} + # PostgreSQL Database + postgres: + image: postgres:17 + container_name: mcp-postgres + hostname: postgres + + environment: + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=$${POSTGRES_PASSWORD:-mysecretpassword} + - POSTGRES_DB=mcp + + ports: + - "5432:5432" + + volumes: + - pgdata:/var/lib/postgresql/data + + networks: + - mcp-network + + restart: unless-stopped + + healthcheck: + test: ["CMD-SHELL", "pg_isready -U postgres"] + interval: 30s + timeout: 5s + retries: 5 + start_period: 20s + + # Redis Cache + redis: + image: redis:latest + container_name: mcp-redis + hostname: redis + + ports: + - "6379:6379" + + networks: + - mcp-network + + restart: unless-stopped + diff --git a/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 new file mode 100644 index 000000000..e11963573 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 @@ -0,0 +1,62 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/cert-manager-certificates.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# cert-manager Certificate Resources +# Gateway Certificate +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-{{ gateway_name }}-cert + namespace: {{ namespace }} +spec: + secretName: mcp-{{ gateway_name }}-server-cert + duration: {{ duration }}h + renewBefore: {{ renew_before }}h + isCA: false + privateKey: + algorithm: RSA + size: 2048 + usages: + - digital signature + - key encipherment + - server auth + - client auth + dnsNames: + - {{ gateway_name }} + - {{ gateway_name }}.{{ namespace }} + - {{ gateway_name }}.{{ namespace }}.svc + - {{ gateway_name }}.{{ namespace }}.svc.cluster.local + issuerRef: + name: {{ issuer_name }} + kind: {{ issuer_kind }} +{% for plugin in plugins %} +--- +# Plugin {{ plugin.name }} Certificate +apiVersion: cert-manager.io/v1 +kind: Certificate +metadata: + name: mcp-{{ plugin.name }}-cert + namespace: {{ namespace }} +spec: + secretName: mcp-{{ plugin.name }}-server-cert + duration: {{ duration }}h + renewBefore: {{ renew_before }}h + isCA: false + privateKey: + algorithm: RSA + size: 2048 + usages: + - digital signature + - key encipherment + - server auth + - client auth + dnsNames: + - {{ plugin.name }} + - {{ plugin.name }}.{{ namespace }} + - {{ plugin.name }}.{{ namespace }}.svc + - {{ plugin.name }}.{{ namespace }}.svc.cluster.local + issuerRef: + name: {{ issuer_name }} + kind: {{ issuer_kind }} +{% endfor %} diff --git a/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 new file mode 100644 index 000000000..67e5a1e87 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 @@ -0,0 +1,38 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/cert-secrets.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# mTLS Certificate Secrets +# CA Certificate (shared by all components) +apiVersion: v1 +kind: Secret +metadata: + name: mcp-ca-secret + namespace: {{ namespace }} +type: Opaque +data: + ca.crt: {{ ca_cert_b64 }} +--- +# Gateway Client Certificate +apiVersion: v1 +kind: Secret +metadata: + name: mcp-{{ gateway_name }}-server-cert + namespace: {{ namespace }} +type: kubernetes.io/tls +data: + tls.crt: {{ gateway_cert_b64 }} + tls.key: {{ gateway_key_b64 }} +{% for plugin in plugins %} +--- +# Plugin {{ plugin.name }} Server Certificate +apiVersion: v1 +kind: Secret +metadata: + name: mcp-{{ plugin.name }}-server-cert + namespace: {{ namespace }} +type: kubernetes.io/tls +data: + tls.crt: {{ plugin.cert_b64 }} + tls.key: {{ plugin.key_b64 }} +{% endfor %} diff --git a/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 new file mode 100644 index 000000000..843bb5fd4 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 @@ -0,0 +1,248 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/deployment.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Kubernetes Deployment for {{ name }} +apiVersion: v1 +kind: Namespace +metadata: + name: {{ namespace }} +--- +apiVersion: v1 +kind: Secret +metadata: + name: {{ name }}-env + namespace: {{ namespace }} +type: Opaque +stringData: +{% if env_vars is defined and env_vars %} + # Environment variables + # NOTE: In production, these should come from CI/CD vault secrets +{% for key, value in env_vars.items() %} + {{ key }}: "{{ value }}" +{% endfor %} +{% endif %} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ name }} + namespace: {{ namespace }} + labels: + app: {{ name }} + component: {% if name == 'mcpgateway' %}gateway{% else %}plugin{% endif %} +spec: + replicas: {{ replicas | default(1) }} + selector: + matchLabels: + app: {{ name }} + template: + metadata: + labels: + app: {{ name }} + component: {% if name == 'mcpgateway' %}gateway{% else %}plugin{% endif %} + spec: + {% if image_pull_secret is defined %} + imagePullSecrets: + - name: {{ image_pull_secret }} + {% endif %} + + {% if init_containers is defined %} + initContainers: + {% for init_container in init_containers %} + - name: {{ init_container.name }} + image: {{ init_container.image }} + command: {{ init_container.command | tojson }} + {% endfor %} + {% endif %} + + containers: + - name: {{ name }} + image: {{ image }} + imagePullPolicy: {{ image_pull_policy | default('IfNotPresent') }} + + ports: + - name: http + containerPort: {{ port | default(8000) }} + protocol: TCP + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} + - name: health + containerPort: 9000 + protocol: TCP + {% endif %} + + env: + {% if mtls_enabled | default(true) %} + {% if name == 'mcpgateway' %} + # mTLS client configuration (gateway connects to plugins) + - name: PLUGINS_CLIENT_MTLS_CA_BUNDLE + value: "/app/certs/ca/ca.crt" + - name: PLUGINS_CLIENT_MTLS_CERTFILE + value: "/app/certs/mcp/tls.crt" + - name: PLUGINS_CLIENT_MTLS_KEYFILE + value: "/app/certs/mcp/tls.key" + - name: PLUGINS_CLIENT_MTLS_VERIFY + value: "{{ mtls_verify | default('true') }}" + - name: PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME + value: "{{ mtls_check_hostname | default('false') }}" + {% else %} + # mTLS server configuration (plugin accepts gateway connections) + - name: PLUGINS_TRANSPORT + value: "http" + - name: PLUGINS_SERVER_HOST + value: "0.0.0.0" + - name: PLUGINS_SERVER_PORT + value: "{{ port | default(8000) }}" + - name: PLUGINS_SERVER_SSL_ENABLED + value: "true" + - name: PLUGINS_SERVER_SSL_KEYFILE + value: "/app/certs/mcp/tls.key" + - name: PLUGINS_SERVER_SSL_CERTFILE + value: "/app/certs/mcp/tls.crt" + - name: PLUGINS_SERVER_SSL_CA_CERTS + value: "/app/certs/ca/ca.crt" + - name: PLUGINS_SERVER_SSL_CERT_REQS + value: "2" # CERT_REQUIRED + {% endif %} + {% endif %} + + envFrom: + - secretRef: + name: {{ name }}-env + + {% if health_check | default(true) %} + livenessProbe: + httpGet: + path: /health + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} + # Plugin with mTLS: use separate health check server on port 9000 + port: health + scheme: HTTP + {% else %} + # Gateway or non-mTLS: health check on main HTTP port + port: http + scheme: HTTP + {% endif %} + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + httpGet: + path: /health + {% if mtls_enabled | default(true) and name != 'mcpgateway' %} + # Plugin with mTLS: use separate health check server on port 9000 + port: health + scheme: HTTP + {% else %} + # Gateway or non-mTLS: health check on main HTTP port + port: http + scheme: HTTP + {% endif %} + initialDelaySeconds: 10 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + {% endif %} + + resources: + requests: + memory: "{{ memory_request | default('256Mi') }}" + cpu: "{{ cpu_request | default('100m') }}" + limits: + memory: "{{ memory_limit | default('512Mi') }}" + cpu: "{{ cpu_limit | default('500m') }}" + + volumeMounts: + {% if mtls_enabled | default(true) %} + - name: server-cert + mountPath: /app/certs/mcp + readOnly: true + - name: ca-cert + mountPath: /app/certs/ca + readOnly: true + {% endif %} + {% if name == 'mcpgateway' and has_plugins | default(false) %} + - name: plugins-config + mountPath: /app/config + readOnly: true + {% endif %} + + {% if volume_mounts is defined %} + {% for mount in volume_mounts %} + - name: {{ mount.name }} + mountPath: {{ mount.path }} + {% if mount.readonly | default(false) %} + readOnly: true + {% endif %} + {% endfor %} + {% endif %} + + securityContext: + runAsNonRoot: true + {% if run_as_user is defined %} + runAsUser: {{ run_as_user }} + {% endif %} + allowPrivilegeEscalation: false + capabilities: + drop: + - ALL + readOnlyRootFilesystem: false + + volumes: + {% if mtls_enabled | default(true) %} + - name: server-cert + secret: + secretName: mcp-{{ name }}-server-cert + defaultMode: 0444 + - name: ca-cert + secret: + secretName: mcp-ca-secret + defaultMode: 0444 + {% endif %} + {% if name == 'mcpgateway' and has_plugins | default(false) %} + - name: plugins-config + configMap: + name: plugins-config + defaultMode: 0444 + {% endif %} + + {% if volumes is defined %} + {% for volume in volumes %} + - name: {{ volume.name }} + {% if volume.type == 'secret' %} + secret: + secretName: {{ volume.secret_name }} + {% if volume.default_mode is defined %} + defaultMode: {{ volume.default_mode }} + {% endif %} + {% elif volume.type == 'configmap' %} + configMap: + name: {{ volume.configmap_name }} + {% elif volume.type == 'persistentVolumeClaim' %} + persistentVolumeClaim: + claimName: {{ volume.claim_name }} + {% endif %} + {% endfor %} + {% endif %} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ name }} + namespace: {{ namespace }} + labels: + app: {{ name }} +spec: + type: {{ service_type | default('ClusterIP') }} + ports: + - name: http + port: {{ port | default(8000) }} + targetPort: http + protocol: TCP + {% if service_type == 'NodePort' and node_port is defined %} + nodePort: {{ node_port }} + {% endif %} + selector: + app: {{ name }} diff --git a/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 new file mode 100644 index 000000000..d517d8459 --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 @@ -0,0 +1,13 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/plugins-configmap.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# ConfigMap for plugins configuration +apiVersion: v1 +kind: ConfigMap +metadata: + name: plugins-config + namespace: {{ namespace }} +data: + plugins.yaml: | +{{ plugins_config | safe | indent(4, first=True) }} diff --git a/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 new file mode 100644 index 000000000..de58a288e --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 @@ -0,0 +1,125 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/postgres.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# PostgreSQL Database for MCP Gateway +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: postgres-pvc + namespace: {{ namespace }} +spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: {{ storage_size }} + {% if storage_class %} + storageClassName: {{ storage_class }} + {% endif %} +--- +apiVersion: v1 +kind: Secret +metadata: + name: postgres-secret + namespace: {{ namespace }} +type: Opaque +stringData: + # Official PostgreSQL image variables + POSTGRES_USER: {{ user }} + POSTGRES_PASSWORD: {{ password }} + POSTGRES_DB: {{ database }} + # Red Hat/SCL PostgreSQL image variables (OpenShift-compatible) + POSTGRESQL_USER: {{ user }} + POSTGRESQL_PASSWORD: {{ password }} + POSTGRESQL_DATABASE: {{ database }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: postgres + namespace: {{ namespace }} + labels: + app: postgres + component: database +spec: + replicas: 1 + selector: + matchLabels: + app: postgres + template: + metadata: + labels: + app: postgres + component: database + spec: + containers: + - name: postgres + image: {{ image }} + imagePullPolicy: IfNotPresent + + ports: + - name: postgres + containerPort: 5432 + protocol: TCP + + envFrom: + - secretRef: + name: postgres-secret + + volumeMounts: + - name: postgres-data + mountPath: /var/lib/postgresql/data + subPath: postgres + + livenessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ user }} + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + exec: + command: + - /bin/sh + - -c + - pg_isready -U {{ user }} + initialDelaySeconds: 10 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + + resources: + requests: + memory: "256Mi" + cpu: "100m" + limits: + memory: "512Mi" + cpu: "500m" + + volumes: + - name: postgres-data + persistentVolumeClaim: + claimName: postgres-pvc +--- +apiVersion: v1 +kind: Service +metadata: + name: postgres + namespace: {{ namespace }} + labels: + app: postgres +spec: + type: ClusterIP + ports: + - name: postgres + port: 5432 + targetPort: postgres + protocol: TCP + selector: + app: postgres diff --git a/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 b/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 new file mode 100644 index 000000000..340e2c71a --- /dev/null +++ b/mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 @@ -0,0 +1,76 @@ +# Location: ./mcpgateway/tools/builder/templates/kubernetes/redis.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Redis Cache for MCP Gateway +apiVersion: apps/v1 +kind: Deployment +metadata: + name: redis + namespace: {{ namespace }} + labels: + app: redis + component: cache +spec: + replicas: 1 + selector: + matchLabels: + app: redis + template: + metadata: + labels: + app: redis + component: cache + spec: + containers: + - name: redis + image: {{ image }} + imagePullPolicy: IfNotPresent + + ports: + - name: redis + containerPort: 6379 + protocol: TCP + + livenessProbe: + tcpSocket: + port: redis + initialDelaySeconds: 30 + periodSeconds: 10 + timeoutSeconds: 5 + failureThreshold: 3 + + readinessProbe: + exec: + command: + - redis-cli + - ping + initialDelaySeconds: 10 + periodSeconds: 5 + timeoutSeconds: 3 + failureThreshold: 3 + + resources: + requests: + memory: "128Mi" + cpu: "50m" + limits: + memory: "256Mi" + cpu: "200m" +--- +apiVersion: v1 +kind: Service +metadata: + name: redis + namespace: {{ namespace }} + labels: + app: redis +spec: + type: ClusterIP + ports: + - name: redis + port: 6379 + targetPort: redis + protocol: TCP + selector: + app: redis diff --git a/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 b/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 new file mode 100644 index 000000000..a8221873a --- /dev/null +++ b/mcpgateway/tools/builder/templates/plugins-config.yaml.j2 @@ -0,0 +1,49 @@ +# Location: ./mcpgateway/tools/builder/templates/compose/plugins-config.yaml.j2 +# Copyright 2025 +# SPDX-License-Identifier: Apache-2.0 +# Authors: Teryl Taylor +# Plugin configuration for MCP Gateway +# Auto-generated from mcp-stack.yaml + +# Global plugin settings +plugin_settings: + parallel_execution_within_band: true + plugin_timeout: 120 + fail_on_plugin_error: false + enable_plugin_api: true + plugin_health_check_interval: 60 + +# External plugin connections +plugins: +{% for plugin in plugins -%} +- name: {{ plugin.name }} + kind: external +{%- if plugin.description %} + description: "{{ plugin.description }}" +{%- endif %} +{%- if plugin.version %} + version: "{{ plugin.version }}" +{%- endif %} +{%- if plugin.author %} + author: "{{ plugin.author }}" +{%- endif %} +{%- if plugin.hooks %} + hooks: {{ plugin.hooks }} +{%- endif %} +{%- if plugin.tags %} + tags: {{ plugin.tags }} +{%- endif %} +{%- if plugin.mode %} + mode: "{{ plugin.mode }}" +{%- endif %} +{%- if plugin.priority %} + priority: {{ plugin.priority }} +{%- endif %} +{%- if plugin.conditions %} + conditions: {{ plugin.conditions }} +{%- endif %} + mcp: + proto: STREAMABLEHTTP + url: {{ plugin.url }} + +{% endfor %} diff --git a/mcpgateway/tools/cli.py b/mcpgateway/tools/cli.py new file mode 100644 index 000000000..3fda994c1 --- /dev/null +++ b/mcpgateway/tools/cli.py @@ -0,0 +1,56 @@ +"""Location: ./mcpgateway/tools/cli.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +cforge CLI ─ command line tools for building and deploying the +MCP Gateway and its plugins. + +This module is exposed as a **console-script** via: + + [project.scripts] + cforge = "mcpgateway.tools.cli:main" + +so that a user can simply type `cforge ...` to use the CLI. + +Features +───────── +* plugin: + - bootstrap: Creates a new plugin project from template │ + - install: Installs plugins into a Python environment │ + - package: Builds an MCP server to serve plugins as tools +* gateway: + - Validates deploy.yaml configuration + - Builds plugin containers from git repos + - Generates mTLS certificates + - Deploys to Kubernetes or Docker Compose + - Integrates with CI/CD vault secrets + + +Typical usage +───────────── +```console +$ cforge --help +``` +""" + +# Third-Party +import typer + +# First-Party +import mcpgateway.plugins.tools.cli as plugins +import mcpgateway.tools.builder.cli as builder + +app = typer.Typer(help="Command line tools for building, deploying, and interacting with the ContextForge MCP Gateway") + +app.add_typer(plugins.app, name="plugin", help="Manage the plugin lifecycle") +app.add_typer(builder.app, name="gateway", help="Manage the building and deployment of the gateway") + + +def main() -> None: # noqa: D401 - imperative mood is fine here + """Entry point for the *cforge* console script.""" + app(obj={}) + + +if __name__ == "__main__": + main() diff --git a/plugin_templates/external/.env.template b/plugin_templates/external/.env.template index 6d9faf358..0715139d9 100644 --- a/plugin_templates/external/.env.template +++ b/plugin_templates/external/.env.template @@ -21,3 +21,37 @@ PLUGINS_CONFIG=./resources/plugins/config.yaml # Configuration path for chuck mcp runtime CHUK_MCP_CONFIG_PATH=./resources/runtime/config.yaml + +##################################### +# MCP External Plugin Server - mTLS Configuration +##################################### + +# Enable SSL/TLS for external plugin MCP server +# Options: true, false (default) +# When true: Enables HTTPS and optionally mTLS for the plugin MCP server +MCP_SSL_ENABLED=false + +# SSL/TLS Certificate Files +# Path to server private key (required when MCP_SSL_ENABLED=true) +# Generate with: openssl genrsa -out certs/mcp/server.key 2048 +# MCP_SSL_KEYFILE=certs/mcp/server.key + +# Path to server certificate (required when MCP_SSL_ENABLED=true) +# Generate with: openssl req -new -x509 -key certs/mcp/server.key -out certs/mcp/server.crt -days 365 +# MCP_SSL_CERTFILE=certs/mcp/server.crt + +# Optional password for encrypted private key +# MCP_SSL_KEYFILE_PASSWORD= + +# mTLS (Mutual TLS) Configuration +# Client certificate verification mode: +# 0 (CERT_NONE): No client certificate required - standard TLS (default) +# 1 (CERT_OPTIONAL): Client certificate optional - validate if provided +# 2 (CERT_REQUIRED): Client certificate required - full mTLS +# Default: 0 (standard TLS without client verification) +MCP_SSL_CERT_REQS=0 + +# CA certificate bundle for verifying client certificates +# Required when MCP_SSL_CERT_REQS=1 or MCP_SSL_CERT_REQS=2 +# Can be a single CA file or a bundle containing multiple CAs +# MCP_SSL_CA_CERTS=certs/mcp/ca.crt diff --git a/plugin_templates/external/pyproject.toml.jinja b/plugin_templates/external/pyproject.toml.jinja index 6eb6fa286..8bd7aff25 100644 --- a/plugin_templates/external/pyproject.toml.jinja +++ b/plugin_templates/external/pyproject.toml.jinja @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", ] diff --git a/plugins/config.yaml b/plugins/config.yaml index be4e53318..400f5dccb 100644 --- a/plugins/config.yaml +++ b/plugins/config.yaml @@ -469,6 +469,10 @@ plugins: # mcp: # proto: STREAMABLEHTTP # url: http://127.0.0.1:8000/mcp + # # tls: + # # ca_bundle: /app/certs/plugins/ca.crt + # # client_cert: /app/certs/plugins/gateway-client.pem + # # verify: true # Circuit Breaker - trip on high error rates or consecutive failures - name: "CircuitBreaker" diff --git a/plugins/external/config.yaml b/plugins/external/config.yaml index 070220a3c..09edb1ff8 100644 --- a/plugins/external/config.yaml +++ b/plugins/external/config.yaml @@ -5,6 +5,9 @@ plugins: mcp: proto: STREAMABLEHTTP url: http://127.0.0.1:3000/mcp + # tls: + # ca_bundle: /app/certs/plugins/ca.crt + # client_cert: /app/certs/plugins/gateway-client.pem - name: "OPAPluginFilter" kind: "external" @@ -12,6 +15,8 @@ plugins: mcp: proto: STREAMABLEHTTP url: http://127.0.0.1:8000/mcp + # tls: + # verify: true - name: "LLMGuardPlugin" kind: "external" diff --git a/plugins/external/llmguard/pyproject.toml b/plugins/external/llmguard/pyproject.toml index 878530d7a..c53d93e7c 100644 --- a/plugins/external/llmguard/pyproject.toml +++ b/plugins/external/llmguard/pyproject.toml @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", "llm-guard", ] diff --git a/plugins/external/opa/pyproject.toml b/plugins/external/opa/pyproject.toml index 2e789fcad..b9f55b131 100644 --- a/plugins/external/opa/pyproject.toml +++ b/plugins/external/opa/pyproject.toml @@ -44,7 +44,7 @@ authors = [ ] dependencies = [ - "chuk-mcp-runtime>=0.6.5", + "mcp>=1.16.0", "mcp-contextforge-gateway", ] diff --git a/pyproject.toml b/pyproject.toml index 57fb4e766..8b21f343a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ build-backend = "setuptools.build_meta" # ---------------------------------------------------------------- [project] name = "mcp-contextforge-gateway" -version = "0.8.0" +version = "0.7.0" description = "A production-grade MCP Gateway & Proxy built with FastAPI. Supports multi-server registration, virtual server composition, authentication, retry logic, observability, protocol translation, and a unified federated tool catalog." keywords = ["MCP","API","gateway","proxy","tools", "agents","agentic ai","model context protocol","multi-agent","fastapi", @@ -147,14 +147,6 @@ asyncpg = [ "asyncpg>=0.30.0", ] -# Chuck/Chuk MCP Runtime (optional) - External plugin server runtime -# Provides MCP tool decorators, plugin hooks, and multi-transport server support -# Used by: mcpgateway/plugins/framework/external/mcp/server/runtime.py -# Required only if you plan to create external MCP plugin servers -chuck = [ - "chuk-mcp-runtime>=0.6.5", -] - # Optional dependency groups (development) dev = [ "aiohttp>=3.12.15", @@ -164,7 +156,6 @@ dev = [ "black>=25.1.0", "bump2version>=1.0.1", "check-manifest>=0.50", - "chuk-mcp-runtime>=0.6.5", "code2flow>=2.5.1", "cookiecutter>=2.6.0", "coverage>=7.10.6", @@ -259,6 +250,7 @@ Changelog = "https://github.com/IBM/mcp-context-forge/blob/main/CHANGELOG.md" [project.scripts] mcpgateway = "mcpgateway.cli:main" mcpplugins = "mcpgateway.plugins.tools.cli:main" +cforge = "mcpgateway.tools.cli:main" # -------------------------------------------------------------------- # 🔧 setuptools-specific configuration @@ -277,6 +269,9 @@ exclude = ["tests*"] # - templates -> Jinja2 templates shipped at runtime [tool.setuptools.package-data] mcpgateway = [ + "tools/builder/templates/*.yaml.j2", + "tools/builder/templates/compose/*.yaml.j2", + "tools/builder/templates/kubernetes/*.yaml.j2", "py.typed", "static/*.css", "static/*.js", diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py index e70565ddd..427844cb6 100644 --- a/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/server/test_runtime.py @@ -9,6 +9,7 @@ # Standard import asyncio +import json # Third-Party import pytest diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py new file mode 100644 index 000000000..41d85c8cc --- /dev/null +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_certificate_validation.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Tests for TLS/mTLS certificate validation in external plugin client. +""" + +# Standard +import datetime +import ssl +from pathlib import Path +from unittest.mock import Mock, patch + +# Third-Party +from cryptography import x509 +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.x509.oid import ExtensionOID, NameOID +import httpx +import pytest + +# First-Party +from mcpgateway.plugins.framework.external.mcp.tls_utils import create_ssl_context +from mcpgateway.plugins.framework.models import MCPClientTLSConfig + + +def generate_self_signed_cert(tmp_path: Path, common_name: str = "localhost", expired: bool = False) -> tuple[Path, Path]: + """Generate a self-signed certificate for testing. + + Args: + tmp_path: Temporary directory path + common_name: Common name for the certificate + expired: If True, create an already-expired certificate + + Returns: + Tuple of (cert_path, key_path) + """ + # Generate private key + private_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Certificate validity period + if expired: + # Create an expired certificate (valid from 2 years ago to 1 year ago) + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=730) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta(days=365) + else: + # Create a valid certificate (valid from now for 365 days) + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=365) + + # Create certificate + subject = issuer = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Org"), + x509.NameAttribute(NameOID.COMMON_NAME, common_name), + ] + ) + + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(private_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(not_valid_before) + .not_valid_after(not_valid_after) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName(common_name)]), + critical=False, + ) + .sign(private_key, hashes.SHA256(), default_backend()) + ) + + # Write certificate + cert_path = tmp_path / f"{common_name}_cert.pem" + cert_path.write_bytes(cert.public_bytes(serialization.Encoding.PEM)) + + # Write private key + key_path = tmp_path / f"{common_name}_key.pem" + key_path.write_bytes( + private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return cert_path, key_path + + +def generate_ca_and_signed_cert(tmp_path: Path, common_name: str = "localhost") -> tuple[Path, Path, Path]: + """Generate a CA certificate and a certificate signed by that CA. + + Args: + tmp_path: Temporary directory path + common_name: Common name for the server certificate + + Returns: + Tuple of (ca_cert_path, server_cert_path, server_key_path) + """ + # Generate CA private key + ca_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Create CA certificate + ca_subject = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test CA"), + x509.NameAttribute(NameOID.COMMON_NAME, "Test CA"), + ] + ) + + ca_cert = ( + x509.CertificateBuilder() + .subject_name(ca_subject) + .issuer_name(ca_subject) + .public_key(ca_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.now(tz=datetime.timezone.utc)) + .not_valid_after(datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=3650)) + .add_extension( + x509.BasicConstraints(ca=True, path_length=None), + critical=True, + ) + .sign(ca_key, hashes.SHA256(), default_backend()) + ) + + # Generate server private key + server_key = rsa.generate_private_key(public_exponent=65537, key_size=4096, backend=default_backend()) + + # Create server certificate signed by CA + server_subject = x509.Name( + [ + x509.NameAttribute(NameOID.COUNTRY_NAME, "US"), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, "California"), + x509.NameAttribute(NameOID.LOCALITY_NAME, "San Francisco"), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, "Test Server"), + x509.NameAttribute(NameOID.COMMON_NAME, common_name), + ] + ) + + server_cert = ( + x509.CertificateBuilder() + .subject_name(server_subject) + .issuer_name(ca_subject) + .public_key(server_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(datetime.datetime.now(tz=datetime.timezone.utc)) + .not_valid_after(datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=365)) + .add_extension( + x509.SubjectAlternativeName([x509.DNSName(common_name)]), + critical=False, + ) + .sign(ca_key, hashes.SHA256(), default_backend()) + ) + + # Write CA certificate + ca_cert_path = tmp_path / "ca_cert.pem" + ca_cert_path.write_bytes(ca_cert.public_bytes(serialization.Encoding.PEM)) + + # Write server certificate + server_cert_path = tmp_path / f"{common_name}_cert.pem" + server_cert_path.write_bytes(server_cert.public_bytes(serialization.Encoding.PEM)) + + # Write server private key + server_key_path = tmp_path / f"{common_name}_key.pem" + server_key_path.write_bytes( + server_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + ) + + return ca_cert_path, server_cert_path, server_key_path + + +def test_ssl_context_configured_for_certificate_validation(tmp_path): + """Test that create_ssl_context() configures SSL context for certificate validation. + + This validates that the SSL context is configured with CERT_REQUIRED mode, + which will reject invalid certificates (like self-signed certs) during + TLS handshake. + + This test validates the actual production code path used in client.py. + Note: This tests configuration, not actual rejection. See + test_ssl_context_rejects_invalid_certificate for rejection behavior. + """ + # Generate self-signed certificate (not signed by a trusted CA) + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="untrusted.example.com") + + # Create TLS config pointing to self-signed cert as CA + # This simulates a server presenting a self-signed certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context using the production utility function + # This is the same function used in client.py for external plugin connections + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context has strict validation enabled + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + assert ssl_context.check_hostname is True + + # Note: We can't easily test the actual connection failure without spinning up + # a real HTTPS server, but we can verify the SSL context is configured correctly + # to reject invalid certificates + + +def test_ssl_context_rejects_invalid_certificate(): + """Test that SSL context with CERT_REQUIRED will reject invalid certificates. + + This test demonstrates the rejection behavior by showing that: + 1. An SSL context created with verify=True has CERT_REQUIRED mode + 2. CERT_REQUIRED mode means OpenSSL will reject invalid certificates during handshake + 3. The rejection is simulated since we can't easily spin up a real HTTPS server + + Per Python SSL docs: "If CERT_REQUIRED is used, the client or server must provide + a valid and trusted certificate. A connection attempt will raise an SSLError if + the certificate validation fails." + + This validates the actual rejection behavior mechanism. + """ + import tempfile + + # Create a valid self-signed CA certificate for testing + with tempfile.TemporaryDirectory() as tmpdir: + ca_cert_path, _ca_key_path = generate_self_signed_cert(Path(tmpdir), common_name="TestCA") + + # Create TLS config with strict verification + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context - this will succeed (configuration step) + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context requires certificate validation + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Should require certificate verification" + assert ssl_context.check_hostname is True, "Should verify hostname" + + # The key point: When this SSL context is used in a real connection: + # - If server presents a certificate NOT signed by our test CA -> SSLError + # - If server presents an expired certificate -> SSLError + # - If server presents a certificate with wrong hostname -> SSLError + # - If server doesn't present a certificate -> SSLError + # + # This is guaranteed by the CERT_REQUIRED setting and documented in: + # - Python SSL docs: https://docs.python.org/3/library/ssl.html#ssl.CERT_REQUIRED + # - OpenSSL verify docs: https://docs.openssl.org/3.1/man1/openssl-verification-options/ + # - RFC 5280 Section 6: Certificate path validation + + # To demonstrate, we can show that attempting to verify a different certificate + # would fail. Here's what the SSL context will do during handshake: + with patch("ssl.SSLContext.wrap_socket") as mock_wrap: + # Simulate what happens when OpenSSL rejects the certificate + mock_wrap.side_effect = ssl.SSLError("[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed") + + # This is what would happen if we tried to connect to a server + # with an invalid certificate: + with pytest.raises(ssl.SSLError, match="CERTIFICATE_VERIFY_FAILED"): + ssl_context.wrap_socket(Mock(), server_hostname="example.com") + + +def test_ssl_context_accepts_valid_ca_signed_certificate(tmp_path): + """Test that create_ssl_context() accepts certificates signed by a trusted CA. + + This validates that certificate chain validation works correctly when + a proper CA certificate is provided. + + This test validates the actual production code path used in client.py. + """ + # Generate CA and a certificate signed by that CA + ca_cert_path, server_cert_path, server_key_path = generate_ca_and_signed_cert(tmp_path, common_name="valid.example.com") + + # Create TLS config with the CA certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=str(server_cert_path), keyfile=str(server_key_path), verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context is configured for strict validation + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + assert ssl_context.check_hostname is True + + # Verify we can load the certificate successfully + # In a real scenario, this would successfully connect to a server + # presenting a certificate signed by our CA + + +def test_expired_certificate_detection(tmp_path): + """Test that expired certificates can be detected. + + Per OpenSSL docs and RFC 5280: Certificate validity period (notBefore/notAfter) + is automatically checked during validation. This test verifies we can + generate expired certificates that would fail validation. + + This test validates the actual production code path used in client.py. + """ + # Generate an already-expired certificate + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="expired.example.com", expired=True) + + # Load the certificate and verify it's expired + with open(cert_path, "rb") as f: + cert_data = f.read() + cert = x509.load_pem_x509_certificate(cert_data, default_backend()) + + # Verify the certificate is expired + now = datetime.datetime.now(tz=datetime.timezone.utc) + assert cert.not_valid_after_utc < now, "Certificate should be expired" + assert cert.not_valid_before_utc < now, "Certificate notBefore should be in the past" + + # Create TLS config with the expired certificate + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=False) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify the context has verification enabled + assert ssl_context.verify_mode == ssl.CERT_REQUIRED + + # We've verified the certificate is expired - in actual usage, + # create_ssl_context() with CERT_REQUIRED would automatically + # reject this during the TLS handshake + + +def test_certificate_validity_period_future(tmp_path): + """Test detection of certificates that are not yet valid (notBefore in future). + + Per OpenSSL docs: Certificates with notBefore date after current time + are rejected with "certificate is not yet valid" error. + """ + # Generate private key + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend()) + + # Create certificate with notBefore in the future + not_valid_before = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=30) + not_valid_after = datetime.datetime.now(tz=datetime.timezone.utc) + datetime.timedelta(days=395) + + subject = issuer = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "future.example.com")]) + + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(private_key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(not_valid_before) + .not_valid_after(not_valid_after) + .sign(private_key, hashes.SHA256(), default_backend()) + ) + + # Write certificate + cert_path = tmp_path / "future_cert.pem" + cert_path.write_bytes(cert.public_bytes(serialization.Encoding.PEM)) + + # Verify the certificate is not yet valid + now = datetime.datetime.now(tz=datetime.timezone.utc) + assert cert.not_valid_before_utc > now, "Certificate should not yet be valid" + + # In actual usage, ssl.create_default_context() would reject this certificate + # during validation with "certificate is not yet valid" + + +def test_ssl_context_configuration_for_mtls(tmp_path): + """Test that SSL context is properly configured for mTLS. + + This test verifies that the SSL context configuration matches the + security requirements for mutual TLS authentication. + + This test validates the actual production code path used in client.py. + """ + # Generate CA and certificates + ca_cert_path, client_cert_path, client_key_path = generate_ca_and_signed_cert(tmp_path, common_name="client.example.com") + + # Create TLS config for mTLS + tls_config = MCPClientTLSConfig(ca_bundle=str(ca_cert_path), certfile=str(client_cert_path), keyfile=str(client_key_path), verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify security settings + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Should require certificate verification" + assert ssl_context.check_hostname is True, "Should verify hostname by default" + + # Verify protocol restrictions (no SSLv2, SSLv3) + # create_ssl_context() automatically disables weak protocols + assert ssl_context.minimum_version >= ssl.TLSVersion.TLSv1_2, "Should use TLS 1.2 or higher" + + +def test_ssl_context_with_verification_disabled(tmp_path): + """Test SSL context when certificate verification is explicitly disabled. + + When verify=False, the SSL context should allow connections without + certificate validation. This is useful for testing but not recommended + for production. + + This test validates the actual production code path used in client.py. + """ + # Generate self-signed certificate + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="novalidate.example.com") + + # Create TLS config with verification disabled + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=False, check_hostname=False) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify security is disabled as configured + assert ssl_context.verify_mode == ssl.CERT_NONE, "Verification should be disabled" + assert ssl_context.check_hostname is False, "Hostname checking should be disabled" + + +def test_certificate_with_wrong_hostname_would_fail(tmp_path): + """Test that hostname verification would reject certificates with wrong hostname. + + Per Python ssl docs: When check_hostname is enabled, the certificate's + Subject Alternative Name (SAN) or Common Name (CN) must match the hostname. + + This test validates the actual production code path used in client.py. + """ + # Generate certificate for one hostname + cert_path, _key_path = generate_self_signed_cert(tmp_path, common_name="correct.example.com") + + # Load the certificate + with open(cert_path, "rb") as f: + cert_data = f.read() + cert = x509.load_pem_x509_certificate(cert_data, default_backend()) + + # Verify the certificate has the correct hostname in SAN + san_extension = cert.extensions.get_extension_for_oid(ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + san_names = san_extension.value.get_values_for_type(x509.DNSName) + + assert "correct.example.com" in san_names, "Certificate should have correct.example.com in SAN" + assert "wrong.example.com" not in san_names, "Certificate should not have wrong.example.com in SAN" + + # Create TLS config with hostname checking enabled + tls_config = MCPClientTLSConfig(ca_bundle=str(cert_path), certfile=None, keyfile=None, verify=True, check_hostname=True) + + # Create SSL context using the production utility function + ssl_context = create_ssl_context(tls_config, "TestPlugin") + + # Verify hostname checking is enabled + assert ssl_context.check_hostname is True, "Hostname checking should be enabled" + assert ssl_context.verify_mode == ssl.CERT_REQUIRED, "Certificate verification should be required" + + # In actual usage, connecting to "wrong.example.com" with this certificate + # would fail with: ssl.CertificateError: hostname 'wrong.example.com' + # doesn't match 'correct.example.com' diff --git a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py index 130ba510a..058553e4b 100644 --- a/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py +++ b/tests/unit/mcpgateway/plugins/framework/external/mcp/test_client_stdio.py @@ -135,10 +135,9 @@ async def test_client_get_plugin_configs(): all_configs = [] configs = await session.call_tool("get_plugin_configs", {}) for content in configs.content: - confs = json.loads(content.text) - for c in confs: - plugconfig = PluginConfig.model_validate(c) - all_configs.append(plugconfig) + conf = json.loads(content.text) + plugconfig = PluginConfig.model_validate(conf) + all_configs.append(plugconfig) await exit_stack.aclose() assert all_configs[0].name == "SynonymsPlugin" assert all_configs[0].kind == "plugins.regex_filter.search_replace.SearchReplacePlugin" diff --git a/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py b/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py index 3bca0ca66..018c4fdcd 100644 --- a/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py +++ b/tests/unit/mcpgateway/plugins/framework/test_manager_extended.py @@ -547,7 +547,6 @@ async def test_manager_initialization_edge_cases(): # Test plugin instantiation failure (covers lines 495-501) # First-Party - from mcpgateway.plugins.framework.loader.plugin import PluginLoader from mcpgateway.plugins.framework.models import PluginConfig, PluginMode, PluginSettings manager2 = PluginManager() @@ -592,11 +591,6 @@ async def test_manager_initialization_edge_cases(): plugin_settings=PluginSettings() ) - with patch('mcpgateway.plugins.framework.manager.logger') as mock_logger: - await manager3.initialize() - # Disabled plugins are now registered as stubs (info log), not skipped during load - mock_logger.info.assert_any_call("Registered disabled plugin: DisabledPlugin (display only, not instantiated)") - await manager3.shutdown() await manager2.shutdown() diff --git a/tests/unit/mcpgateway/plugins/framework/test_models_tls.py b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py new file mode 100644 index 000000000..d01ba9336 --- /dev/null +++ b/tests/unit/mcpgateway/plugins/framework/test_models_tls.py @@ -0,0 +1,114 @@ +"""Tests for TLS configuration on external MCP plugins.""" + +# Standard +from pathlib import Path + +# Third-Party +import pytest + +# First-Party +from mcpgateway.plugins.framework.models import MCPClientTLSConfig, PluginConfig + + +def _write_pem(path: Path) -> str: + path.write_text("-----BEGIN CERTIFICATE-----\nMIIBszCCAVmgAwIBAgIJALICEFAKE000MA0GCSqGSIb3DQEBCwUAMBQxEjAQBgNV\nBAMMCXRlc3QtY2EwHhcNMjUwMTAxMDAwMDAwWhcNMjYwMTAxMDAwMDAwWjAUMRIw\nEAYDVQQDDAl0ZXN0LWNsaTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEB\nALzM8FSo48ByKC16ecEsPpRghr7kDDLOZWisS+8mHb4RLzdrg5e8tRgFuBlbslUT\n8VE+j54v+J2mOv5u18CVeq4xjp1IqP/PpeL9Z8sY2XohGKVCUj8lMiMM6trXwPh3\n4nDXwG8hxhTZWOeAZv93FqMgBANpUAOC0yM5Ar+uSoC2Tbf3juDEnHiVNWdP6hJg\n38zrla9Yh+SPYj9m6z6wG6jZc37SaJnKI/v4ycq31wkK7S226gRA7i72H+eEt1Kp\nI5rkJ+6kkfgeJc8FvbB6c88T9EycneEW7Pm2Xp6gJdxeN1g2jeDJPnWc5Cj9VPYU\nCJPwy6DnKSmGA4MZij19+cUCAwEAAaNQME4wHQYDVR0OBBYEFL0CyJXw5CtP6Ls9\nVgn8BxwysA2fMB8GA1UdIwQYMBaAFL0CyJXw5CtP6Ls9Vgn8BxwysA2fMAwGA1Ud\nEwQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAIgUjACmJS4cGL7yp0T1vpuZi856\nG7k18Om8Ze9fJbVI1MBBxDWS5F9bNOn5z1ytgCMs9VXg7QibQPXlqprcM2aYJWaV\ndHZ92ohqzJ0EB1G2r8x5Fkw3O0mEWcJvl10FgUVHVGzi552MZGFMZ7DAMA4EAq/u\nsOUgWup8uLSyvvl7dao3rJ8k+YkBWkDu6eCKwQn3nNKFB5Bg9P6IKkmDdLhYodl/\nW1q/qmHZapCp8XDsrmS8skWsmcFJFU6f4VDOwdJaNiMgRGQpWlwO4dRw9xvyhsHc\nsOf0HWNvw60sX6Zav8HC0FzDGhGJkpyyU10BzpQLVEf5AEE7MkK5eeqi2+0=\n-----END CERTIFICATE-----\n", encoding="utf-8") + return str(path) + + +@pytest.mark.parametrize( + "verify", + [True, False], +) +def test_plugin_config_supports_tls_block(tmp_path, verify): + ca_path = Path(tmp_path) / "ca.crt" + client_bundle = Path(tmp_path) / "client.pem" + _write_pem(ca_path) + _write_pem(client_bundle) + + config = PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "ca_bundle": str(ca_path), + "certfile": str(client_bundle), + "verify": verify, + }, + }, + ) + + assert config.mcp is not None + assert config.mcp.tls is not None + assert config.mcp.tls.certfile == str(client_bundle) + assert config.mcp.tls.verify == verify + + +def test_plugin_config_tls_missing_cert_raises(tmp_path): + ca_path = Path(tmp_path) / "ca.crt" + _write_pem(ca_path) + + with pytest.raises(ValueError): + PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "keyfile": str(ca_path), + }, + }, + ) + + +def test_plugin_config_tls_missing_file(tmp_path): + missing_path = Path(tmp_path) / "missing.crt" + + with pytest.raises(ValueError): + PluginConfig( + name="ExternalTLSPlugin", + kind="external", + hooks=["prompt_pre_fetch"], + mcp={ + "proto": "STREAMABLEHTTP", + "url": "https://plugins.internal.example.com/mcp", + "tls": { + "ca_bundle": str(missing_path), + }, + }, + ) + + +def test_tls_config_from_env_defaults(monkeypatch, tmp_path): + ca_path = Path(tmp_path) / "ca.crt" + client_cert = Path(tmp_path) / "client.pem" + _write_pem(ca_path) + _write_pem(client_cert) + + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CA_BUNDLE", str(ca_path)) + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CERTFILE", str(client_cert)) + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_VERIFY", "true") + monkeypatch.setenv("PLUGINS_CLIENT_MTLS_CHECK_HOSTNAME", "true") + + tls_config = MCPClientTLSConfig.from_env() + + assert tls_config is not None + assert tls_config.ca_bundle == str(ca_path) + assert tls_config.certfile == str(client_cert) + assert tls_config.verify is True + assert tls_config.check_hostname is True + + +def test_tls_config_from_env_returns_none(monkeypatch): + monkeypatch.delenv("PLUGINS_MTLS_CA_BUNDLE", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_CERT", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_KEY", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CLIENT_KEY_PASSWORD", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_VERIFY", raising=False) + monkeypatch.delenv("PLUGINS_MTLS_CHECK_HOSTNAME", raising=False) + + assert MCPClientTLSConfig.from_env() is None diff --git a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py index 8d0d161ac..ff391e052 100644 --- a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py +++ b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py @@ -286,7 +286,7 @@ async def test_empty_env_vars(self, echo_script): await endpoint.send("hello world\n") # Wait for response - await asyncio.sleep(0.1) + await asyncio.sleep(0.5) # Check that process was started assert endpoint._proc is not None diff --git a/tests/unit/mcpgateway/tools/__init__.py b/tests/unit/mcpgateway/tools/__init__.py new file mode 100644 index 000000000..eee1aa024 --- /dev/null +++ b/tests/unit/mcpgateway/tools/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor +""" diff --git a/tests/unit/mcpgateway/tools/builder/__init__.py b/tests/unit/mcpgateway/tools/builder/__init__.py new file mode 100644 index 000000000..e63d648ed --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/__init__.py @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/__init__.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor +""" diff --git a/tests/unit/mcpgateway/tools/builder/test_cli.py b/tests/unit/mcpgateway/tools/builder/test_cli.py new file mode 100644 index 000000000..5328f03c3 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_cli.py @@ -0,0 +1,509 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_cli.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for builder CLI commands. +""" + +# Standard +import os +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +# Third-Party +import pytest +import typer +from typer.testing import CliRunner + +# First-Party +from mcpgateway.tools.builder.cli import app, main + + +@pytest.fixture +def runner(): + """Create CLI test runner.""" + return CliRunner() + + +@pytest.fixture +def mock_deployer(): + """Create mock deployer instance.""" + deployer = MagicMock() + deployer.validate = MagicMock() + deployer.build = AsyncMock() + deployer.generate_certificates = AsyncMock() + deployer.deploy = AsyncMock() + deployer.verify = AsyncMock() + deployer.destroy = AsyncMock() + deployer.generate_manifests = MagicMock(return_value=Path("/tmp/manifests")) + return deployer + + +class TestCLICallback: + """Test CLI callback initialization.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_default(self, mock_factory, runner): + """Test CLI callback with default options (Python mode by default).""" + mock_deployer = MagicMock() + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["--help"]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_verbose(self, mock_factory, runner): + """Test CLI callback with verbose flag (Python mode by default).""" + mock_deployer = MagicMock() + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["--verbose", "--help"]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_with_dagger(self, mock_factory, runner, tmp_path): + """Test CLI callback with --dagger flag (opt-in).""" + mock_deployer = MagicMock() + mock_deployer.validate = MagicMock() + mock_factory.return_value = (mock_deployer, "dagger") + + config_file = tmp_path / "test-config.yaml" + config_file.write_text("deployment:\n type: compose\n") + + # Use validate command which invokes the callback + result = runner.invoke(app, ["--dagger", "validate", str(config_file)]) + assert result.exit_code == 0 + # Verify dagger mode was requested + mock_factory.assert_called_once_with("dagger", False) + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_cli_callback_default_python(self, mock_factory, runner, tmp_path): + """Test CLI callback defaults to Python mode.""" + mock_deployer = MagicMock() + mock_deployer.validate = MagicMock() + mock_factory.return_value = (mock_deployer, "python") + + config_file = tmp_path / "test-config.yaml" + config_file.write_text("deployment:\n type: compose\n") + + # Use validate command without --dagger flag to test default + result = runner.invoke(app, ["validate", str(config_file)]) + assert result.exit_code == 0 + # Verify python mode was requested (default) + mock_factory.assert_called_once_with("python", False) + + +class TestValidateCommand: + """Test validate command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_validate_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful configuration validation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.validate.return_value = None + + result = runner.invoke(app, ["validate", str(config_file)]) + assert result.exit_code == 0 + assert "Configuration valid" in result.stdout + mock_deployer.validate.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_validate_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test validation failure.""" + config_file = tmp_path / "invalid-config.yaml" + config_file.write_text("invalid: yaml\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.validate.side_effect = ValueError("Invalid configuration") + + result = runner.invoke(app, ["validate", str(config_file)]) + assert result.exit_code == 1 + assert "Validation failed" in result.stdout + + +class TestBuildCommand: + """Test build command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful build.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file)]) + assert result.exit_code == 0 + assert "Build complete" in result.stdout + mock_deployer.build.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_plugins_only(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building only plugins.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file), "--plugins-only"]) + assert result.exit_code == 0 + # Verify plugins_only flag was passed + call_kwargs = mock_deployer.build.call_args[1] + assert call_kwargs["plugins_only"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_specific_plugins(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building specific plugins.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: Plugin1\n - name: Plugin2\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke( + app, ["build", str(config_file), "--plugin", "Plugin1", "--plugin", "Plugin2"] + ) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_no_cache(self, mock_factory, runner, tmp_path, mock_deployer): + """Test building with --no-cache flag.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["build", str(config_file), "--no-cache"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.build.call_args[1] + assert call_kwargs["no_cache"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_build_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test build failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("gateway:\n image: test:latest\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.build.side_effect = RuntimeError("Build failed") + + result = runner.invoke(app, ["build", str(config_file)]) + assert result.exit_code == 1 + assert "Build failed" in result.stdout + + +class TestCertsCommand: + """Test certs command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_certs_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful certificate generation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["certs", str(config_file)]) + assert result.exit_code == 0 + assert "Certificates generated" in result.stdout + mock_deployer.generate_certificates.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_certs_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test certificate generation failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("plugins:\n - name: TestPlugin\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.generate_certificates.side_effect = RuntimeError("Cert generation failed") + + result = runner.invoke(app, ["certs", str(config_file)]) + assert result.exit_code == 1 + assert "Certificate generation failed" in result.stdout + + +class TestDeployCommand: + """Test deploy command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful deployment.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file)]) + assert result.exit_code == 0 + assert "Deployment complete" in result.stdout + mock_deployer.deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_dry_run(self, mock_factory, runner, tmp_path, mock_deployer): + """Test dry-run deployment.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--dry-run"]) + assert result.exit_code == 0 + assert "Dry-run complete" in result.stdout + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["dry_run"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_skip_build(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with --skip-build.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--skip-build"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["skip_build"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_skip_certs(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with --skip-certs.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--skip-certs"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.deploy.call_args[1] + assert call_kwargs["skip_certs"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_custom_output_dir(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment with custom output directory.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + output_dir = tmp_path / "custom-output" + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["deploy", str(config_file), "--output-dir", str(output_dir)]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_deploy_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test deployment failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.deploy.side_effect = RuntimeError("Deployment failed") + + result = runner.invoke(app, ["deploy", str(config_file)]) + assert result.exit_code == 1 + assert "Deployment failed" in result.stdout + + +class TestVerifyCommand: + """Test verify command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful deployment verification.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["verify", str(config_file)]) + assert result.exit_code == 0 + assert "Deployment healthy" in result.stdout + mock_deployer.verify.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_with_wait(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification with default wait behavior (wait=True by default).""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Default wait is True, so just run verify without any flags + result = runner.invoke(app, ["verify", str(config_file)]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.verify.call_args[1] + assert call_kwargs["wait"] is True + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_with_timeout(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification with custom timeout.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["verify", str(config_file), "--timeout", "600"]) + assert result.exit_code == 0 + call_kwargs = mock_deployer.verify.call_args[1] + assert call_kwargs["timeout"] == 600 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_verify_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test verification failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.verify.side_effect = RuntimeError("Verification failed") + + result = runner.invoke(app, ["verify", str(config_file)]) + assert result.exit_code == 1 + assert "Verification failed" in result.stdout + + +class TestDestroyCommand: + """Test destroy command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_with_force(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy with --force flag.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["destroy", str(config_file), "--force"]) + assert result.exit_code == 0 + assert "Deployment destroyed" in result.stdout + mock_deployer.destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_with_confirmation(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy with user confirmation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Simulate user confirming "yes" + result = runner.invoke(app, ["destroy", str(config_file)], input="y\n") + assert result.exit_code == 0 + assert "Deployment destroyed" in result.stdout + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_abort(self, mock_factory, runner, tmp_path, mock_deployer): + """Test aborting destroy command.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + # Simulate user declining "no" + result = runner.invoke(app, ["destroy", str(config_file)], input="n\n") + assert "Aborted" in result.stdout + mock_deployer.destroy.assert_not_called() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_destroy_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test destroy failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.destroy.side_effect = RuntimeError("Destruction failed") + + result = runner.invoke(app, ["destroy", str(config_file), "--force"]) + assert result.exit_code == 1 + assert "Destruction failed" in result.stdout + + +class TestGenerateCommand: + """Test generate command.""" + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_success(self, mock_factory, runner, tmp_path, mock_deployer): + """Test successful manifest generation.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["generate", str(config_file)]) + assert result.exit_code == 0 + assert "Manifests generated" in result.stdout + mock_deployer.generate_manifests.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_with_output_dir(self, mock_factory, runner, tmp_path, mock_deployer): + """Test manifest generation with custom output directory.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + output_dir = tmp_path / "custom-manifests" + + mock_factory.return_value = (mock_deployer, "python") + + result = runner.invoke(app, ["generate", str(config_file), "--output", str(output_dir)]) + assert result.exit_code == 0 + + @patch("mcpgateway.tools.builder.cli.DeployFactory.create_deployer") + def test_generate_failure(self, mock_factory, runner, tmp_path, mock_deployer): + """Test manifest generation failure.""" + config_file = tmp_path / "mcp-stack.yaml" + config_file.write_text("deployment:\n type: compose\n") + + mock_factory.return_value = (mock_deployer, "python") + mock_deployer.generate_manifests.side_effect = ValueError("Generation failed") + + result = runner.invoke(app, ["generate", str(config_file)]) + assert result.exit_code == 1 + assert "Manifest generation failed" in result.stdout + + +class TestVersionCommand: + """Test version command.""" + + def test_version(self, runner): + """Test version command.""" + result = runner.invoke(app, ["version"]) + assert result.exit_code == 0 + assert "MCP Deploy" in result.stdout + assert "Version" in result.stdout + + +class TestMainFunction: + """Test main entry point.""" + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_success(self, mock_app): + """Test successful main execution.""" + mock_app.return_value = None + main() + mock_app.assert_called_once() + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_keyboard_interrupt(self, mock_app): + """Test main with keyboard interrupt.""" + mock_app.side_effect = KeyboardInterrupt() + with pytest.raises(SystemExit) as exc_info: + main() + assert exc_info.value.code == 130 + + @patch("mcpgateway.tools.builder.cli.app") + def test_main_exception_no_debug(self, mock_app): + """Test main with exception (no debug mode).""" + mock_app.side_effect = RuntimeError("Test error") + with pytest.raises(SystemExit) as exc_info: + main() + assert exc_info.value.code == 1 + + @patch("mcpgateway.tools.builder.cli.app") + @patch.dict(os.environ, {"MCP_DEBUG": "1"}) + def test_main_exception_debug_mode(self, mock_app): + """Test main with exception (debug mode enabled).""" + mock_app.side_effect = RuntimeError("Test error") + with pytest.raises(RuntimeError, match="Test error"): + main() diff --git a/tests/unit/mcpgateway/tools/builder/test_common.py b/tests/unit/mcpgateway/tools/builder/test_common.py new file mode 100644 index 000000000..d1d4a7f69 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_common.py @@ -0,0 +1,989 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_common.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for builder common utilities. +""" + +# Standard +import os +from pathlib import Path +import shutil +import subprocess +from unittest.mock import MagicMock, Mock, patch, mock_open + +# Third-Party +import pytest +import yaml + +# First-Party +from mcpgateway.tools.builder.common import ( + copy_env_template, + deploy_compose, + deploy_kubernetes, + destroy_compose, + destroy_kubernetes, + generate_compose_manifests, + generate_kubernetes_manifests, + generate_plugin_config, + get_deploy_dir, + get_docker_compose_command, + load_config, + run_compose, + verify_compose, + verify_kubernetes, +) + + +class TestGetDeployDir: + """Test get_deploy_dir function.""" + + def test_default_deploy_dir(self): + """Test default deploy directory.""" + with patch.dict(os.environ, {}, clear=True): + result = get_deploy_dir() + assert result == Path("./deploy") + + def test_custom_deploy_dir(self): + """Test custom deploy directory from environment variable.""" + with patch.dict(os.environ, {"MCP_DEPLOY_DIR": "/custom/deploy"}): + result = get_deploy_dir() + assert result == Path("/custom/deploy") + + +class TestLoadConfig: + """Test load_config function.""" + + def test_load_valid_config(self, tmp_path): + """Test loading valid YAML configuration.""" + config_file = tmp_path / "mcp-stack.yaml" + config_data = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + config_file.write_text(yaml.dump(config_data)) + + result = load_config(str(config_file)) + assert result["deployment"]["type"] == "compose" + assert result["gateway"]["image"] == "mcpgateway:latest" + + def test_load_nonexistent_config(self): + """Test loading non-existent configuration file.""" + with pytest.raises(FileNotFoundError, match="Configuration file not found"): + load_config("/nonexistent/config.yaml") + + +class TestGeneratePluginConfig: + """Test generate_plugin_config function.""" + + @patch("mcpgateway.tools.builder.common.Environment") + def test_generate_plugin_config_compose(self, mock_env_class, tmp_path): + """Test generating plugin config for Docker Compose deployment.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "plugins": [ + {"name": "TestPlugin", "port": 8000, "mtls_enabled": True} + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + + # Verify template was called + mock_env.get_template.assert_called_once_with("plugins-config.yaml.j2") + assert result == output_dir / "plugins-config.yaml" + + @patch("mcpgateway.tools.builder.common.Environment") + def test_generate_plugin_config_kubernetes(self, mock_env_class, tmp_path): + """Test generating plugin config for Kubernetes deployment.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "plugins": [ + {"name": "TestPlugin", "port": 8000, "mtls_enabled": False} + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + + # Verify template was called + assert mock_env.get_template.called + assert result == output_dir / "plugins-config.yaml" + + @patch("mcpgateway.tools.builder.common.Environment") + def test_generate_plugin_config_with_overrides(self, mock_env_class, tmp_path): + """Test generating plugin config with plugin_overrides.""" + # Setup mock template + mock_template = MagicMock() + mock_template.render.return_value = "plugins:\n - name: TestPlugin\n" + mock_env = MagicMock() + mock_env.get_template.return_value = mock_template + mock_env_class.return_value = mock_env + + # Create fake template directory + template_dir = tmp_path / "templates" + template_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "plugins": [ + { + "name": "TestPlugin", + "port": 8000, + "plugin_overrides": { + "priority": 10, + "mode": "enforce", + "tags": ["security"], + }, + } + ], + } + + with patch("mcpgateway.tools.builder.common.Path") as mock_path: + mock_path.return_value.__truediv__.return_value = template_dir + output_dir = tmp_path / "output" + output_dir.mkdir() + + result = generate_plugin_config(config, output_dir) + assert result == output_dir / "plugins-config.yaml" + + +class TestCopyEnvTemplate: + """Test copy_env_template function.""" + + def test_copy_env_template_success(self, tmp_path): + """Test successful copying of .env.template.""" + # Create plugin build dir with .env.template + plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + template_file = plugin_dir / ".env.template" + template_file.write_text("TEST_VAR=value\n") + + # Setup deploy dir + deploy_dir = tmp_path / "deploy" + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + copy_env_template("TestPlugin", plugin_dir) + + target_file = deploy_dir / "env" / ".env.TestPlugin" + assert target_file.exists() + assert target_file.read_text() == "TEST_VAR=value\n" + + def test_copy_env_template_no_template(self, tmp_path): + """Test when .env.template doesn't exist.""" + plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + + deploy_dir = tmp_path / "deploy" + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + # Should not raise error, just skip + copy_env_template("TestPlugin", plugin_dir, verbose=True) + + def test_copy_env_template_target_exists(self, tmp_path): + """Test when target file already exists.""" + # Create plugin build dir with .env.template + plugin_dir = tmp_path / "plugin" + plugin_dir.mkdir() + template_file = plugin_dir / ".env.template" + template_file.write_text("NEW_VAR=newvalue\n") + + # Setup deploy dir with existing target + deploy_dir = tmp_path / "deploy" + deploy_dir.mkdir() + env_dir = deploy_dir / "env" + env_dir.mkdir() + target_file = env_dir / ".env.TestPlugin" + target_file.write_text("OLD_VAR=oldvalue\n") + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + copy_env_template("TestPlugin", plugin_dir) + + # Should not overwrite + assert target_file.read_text() == "OLD_VAR=oldvalue\n" + + +class TestGetDockerComposeCommand: + """Test get_docker_compose_command function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_docker_compose_plugin(self, mock_run, mock_which): + """Test detecting docker compose plugin.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0) + + result = get_docker_compose_command() + assert result == ["docker", "compose"] + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_docker_compose_standalone(self, mock_run, mock_which): + """Test detecting standalone docker-compose.""" + + def which_side_effect(cmd): + if cmd == "docker": + return "/usr/bin/docker" + elif cmd == "docker-compose": + return "/usr/bin/docker-compose" + return None + + mock_which.side_effect = which_side_effect + mock_run.side_effect = subprocess.CalledProcessError(1, "cmd") + + result = get_docker_compose_command() + assert result == ["docker-compose"] + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_docker_compose_not_found(self, mock_which): + """Test when docker compose is not available.""" + mock_which.return_value = None + + with pytest.raises(RuntimeError, match="Docker Compose not found"): + get_docker_compose_command() + + +class TestRunCompose: + """Test run_compose function.""" + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_run_compose_success(self, mock_run, mock_get_cmd, tmp_path): + """Test successful compose command execution.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + + mock_get_cmd.return_value = ["docker", "compose"] + mock_run.return_value = Mock(returncode=0, stdout="Success", stderr="") + + result = run_compose(compose_file, ["ps"]) + assert result.returncode == 0 + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + def test_run_compose_file_not_found(self, mock_get_cmd, tmp_path): + """Test run_compose with non-existent file.""" + compose_file = tmp_path / "nonexistent.yaml" + mock_get_cmd.return_value = ["docker", "compose"] + + with pytest.raises(FileNotFoundError, match="Compose file not found"): + run_compose(compose_file, ["ps"]) + + @patch("mcpgateway.tools.builder.common.get_docker_compose_command") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_run_compose_command_failure(self, mock_run, mock_get_cmd, tmp_path): + """Test run_compose command failure.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + + mock_get_cmd.return_value = ["docker", "compose"] + mock_run.side_effect = subprocess.CalledProcessError( + 1, "cmd", output="", stderr="Error" + ) + + with pytest.raises(RuntimeError, match="Docker Compose failed"): + run_compose(compose_file, ["up", "-d"]) + + +class TestDeployCompose: + """Test deploy_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_deploy_compose_success(self, mock_run, tmp_path): + """Test successful Docker Compose deployment.""" + compose_file = tmp_path / "docker-compose.yaml" + mock_run.return_value = Mock(stdout="Deployed", stderr="") + + deploy_compose(compose_file) + mock_run.assert_called_once_with(compose_file, ["up", "-d"], verbose=False) + + +class TestVerifyCompose: + """Test verify_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_verify_compose(self, mock_run, tmp_path): + """Test verifying Docker Compose deployment.""" + compose_file = tmp_path / "docker-compose.yaml" + mock_run.return_value = Mock(stdout="test-service running", stderr="") + + result = verify_compose(compose_file) + assert "test-service running" in result + mock_run.assert_called_once_with(compose_file, ["ps"], verbose=False, check=False) + + +class TestDestroyCompose: + """Test destroy_compose function.""" + + @patch("mcpgateway.tools.builder.common.run_compose") + def test_destroy_compose_success(self, mock_run, tmp_path): + """Test successful Docker Compose destruction.""" + compose_file = tmp_path / "docker-compose.yaml" + compose_file.write_text("services:\n test: {}\n") + mock_run.return_value = Mock(stdout="Removed", stderr="") + + destroy_compose(compose_file) + mock_run.assert_called_once_with(compose_file, ["down", "-v"], verbose=False) + + def test_destroy_compose_file_not_found(self, tmp_path): + """Test destroying with non-existent compose file.""" + compose_file = tmp_path / "nonexistent.yaml" + + # Should not raise error, just print warning + destroy_compose(compose_file) + + +class TestDeployKubernetes: + """Test deploy_kubernetes function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_deploy_kubernetes_success(self, mock_run, mock_which, tmp_path): + """Test successful Kubernetes deployment.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="created", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "plugins-config.yaml").write_text("plugins: []\n") + + deploy_kubernetes(manifests_dir) + assert mock_run.called + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_deploy_kubernetes_kubectl_not_found(self, mock_which, tmp_path): + """Test deployment when kubectl is not available.""" + mock_which.return_value = None + manifests_dir = tmp_path / "manifests" + + with pytest.raises(RuntimeError, match="kubectl not found"): + deploy_kubernetes(manifests_dir) + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_deploy_kubernetes_with_certs(self, mock_run, mock_which, tmp_path): + """Test Kubernetes deployment with certificate secrets.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="created", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "cert-secrets.yaml").write_text("apiVersion: v1\n") + + deploy_kubernetes(manifests_dir) + assert mock_run.called + + +class TestVerifyKubernetes: + """Test verify_kubernetes function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_verify_kubernetes_success(self, mock_run, mock_which): + """Test successful Kubernetes verification.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock( + returncode=0, stdout="pod-1 Running\npod-2 Running", stderr="" + ) + + result = verify_kubernetes("test-ns") + assert "Running" in result + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_verify_kubernetes_kubectl_not_found(self, mock_which): + """Test verification when kubectl is not available.""" + mock_which.return_value = None + + with pytest.raises(RuntimeError, match="kubectl not found"): + verify_kubernetes("test-ns") + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_verify_kubernetes_with_wait(self, mock_run, mock_which): + """Test Kubernetes verification with wait.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="Ready", stderr="") + + result = verify_kubernetes("test-ns", wait=True, timeout=60) + assert mock_run.call_count >= 1 + + +class TestDestroyKubernetes: + """Test destroy_kubernetes function.""" + + @patch("mcpgateway.tools.builder.common.shutil.which") + @patch("mcpgateway.tools.builder.common.subprocess.run") + def test_destroy_kubernetes_success(self, mock_run, mock_which, tmp_path): + """Test successful Kubernetes destruction.""" + mock_which.return_value = "/usr/bin/kubectl" + mock_run.return_value = Mock(returncode=0, stdout="deleted", stderr="") + + manifests_dir = tmp_path / "manifests" + manifests_dir.mkdir() + (manifests_dir / "gateway-deployment.yaml").write_text("apiVersion: v1\n") + (manifests_dir / "plugins-config.yaml").write_text("plugins: []\n") + + destroy_kubernetes(manifests_dir) + assert mock_run.called + + @patch("mcpgateway.tools.builder.common.shutil.which") + def test_destroy_kubernetes_kubectl_not_found(self, mock_which, tmp_path): + """Test destruction when kubectl is not available.""" + mock_which.return_value = None + manifests_dir = tmp_path / "manifests" + + with pytest.raises(RuntimeError, match="kubectl not found"): + destroy_kubernetes(manifests_dir) + + def test_destroy_kubernetes_dir_not_found(self, tmp_path): + """Test destroying with non-existent manifests directory.""" + manifests_dir = tmp_path / "nonexistent" + + with patch("mcpgateway.tools.builder.common.shutil.which", return_value="/usr/bin/kubectl"): + # Should not raise error, just print warning + destroy_kubernetes(manifests_dir) + + +class TestGenerateKubernetesManifests: + """Test generate_kubernetes_manifests function with real template rendering.""" + + def test_generate_manifests_gateway_only(self, tmp_path): + """Test generating Kubernetes manifests for gateway only.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify gateway deployment was created + gateway_file = output_dir / "gateway-deployment.yaml" + assert gateway_file.exists() + + # Parse and validate YAML + with open(gateway_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Should have Deployment and Service + assert len(docs) >= 2 + + # Validate Deployment + deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert deployment is not None + assert deployment["metadata"]["name"] == "mcpgateway" + assert deployment["metadata"]["namespace"] == "test-ns" + assert deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "mcpgateway:latest" + + # Validate Service + service = next((d for d in docs if d.get("kind") == "Service"), None) + assert service is not None + assert service["metadata"]["name"] == "mcpgateway" + assert service["spec"]["ports"][0]["port"] == 4444 + + def test_generate_manifests_with_plugins(self, tmp_path): + """Test generating Kubernetes manifests with plugins.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "mcp-test"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "TestPlugin", + "image": "test-plugin:v1", + "port": 8000, + "mtls_enabled": False, + }, + { + "name": "AnotherPlugin", + "image": "another-plugin:v2", + "port": 8001, + "mtls_enabled": False, + }, + ], + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify plugin deployments were created + plugin1_file = output_dir / "plugin-testplugin-deployment.yaml" + plugin2_file = output_dir / "plugin-anotherplugin-deployment.yaml" + + assert plugin1_file.exists() + assert plugin2_file.exists() + + # Parse and validate first plugin + with open(plugin1_file) as f: + docs = list(yaml.safe_load_all(f)) + + deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert deployment is not None + assert deployment["metadata"]["name"] == "mcp-plugin-testplugin" + assert deployment["metadata"]["namespace"] == "mcp-test" + assert deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "test-plugin:v1" + + def test_generate_manifests_with_mtls(self, tmp_path): + """Test generating Kubernetes manifests with mTLS enabled.""" + # Change to tmp_path to ensure we have a valid working directory + original_dir = None + try: + original_dir = os.getcwd() + except (FileNotFoundError, OSError): + pass # Current directory doesn't exist + + os.chdir(tmp_path) + + try: + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create fake certificate files in the actual location where the code looks + certs_dir = Path("certs/mcp") + ca_dir = certs_dir / "ca" + gateway_dir = certs_dir / "gateway" + plugin_dir = certs_dir / "plugins" / "SecurePlugin" + + ca_dir.mkdir(parents=True, exist_ok=True) + gateway_dir.mkdir(parents=True, exist_ok=True) + plugin_dir.mkdir(parents=True, exist_ok=True) + + (ca_dir / "ca.crt").write_bytes(b"fake-ca-cert") + (gateway_dir / "client.crt").write_bytes(b"fake-gateway-cert") + (gateway_dir / "client.key").write_bytes(b"fake-gateway-key") + (plugin_dir / "server.crt").write_bytes(b"fake-plugin-cert") + (plugin_dir / "server.key").write_bytes(b"fake-plugin-key") + + config = { + "deployment": {"type": "kubernetes", "namespace": "secure-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": True, + }, + "plugins": [ + { + "name": "SecurePlugin", + "image": "secure-plugin:v1", + "port": 8000, + "mtls_enabled": True, + } + ], + } + + generate_kubernetes_manifests(config, output_dir) + finally: + # Clean up created certificate files + if Path("certs").exists(): + shutil.rmtree("certs") + + # Restore original directory if it exists + if original_dir and Path(original_dir).exists(): + os.chdir(original_dir) + + # Verify certificate secrets were created + cert_secrets_file = output_dir / "cert-secrets.yaml" + assert cert_secrets_file.exists() + + # Parse and validate secrets + with open(cert_secrets_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Should have secrets for CA, gateway, and plugin + secrets = [d for d in docs if d.get("kind") == "Secret"] + assert len(secrets) >= 2 # At least gateway and plugin secrets + + def test_generate_manifests_with_infrastructure(self, tmp_path): + """Test generating Kubernetes manifests with PostgreSQL and Redis.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "kubernetes", "namespace": "infra-ns"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + "infrastructure": { + "postgres": { + "enabled": True, + "image": "postgres:17", + "database": "testdb", + "user": "testuser", + "password": "testpass", + }, + "redis": { + "enabled": True, + "image": "redis:alpine", + }, + }, + } + + generate_kubernetes_manifests(config, output_dir) + + # Verify infrastructure manifests were created + postgres_file = output_dir / "postgres-deployment.yaml" + redis_file = output_dir / "redis-deployment.yaml" + + assert postgres_file.exists() + assert redis_file.exists() + + # Parse and validate PostgreSQL + with open(postgres_file) as f: + docs = list(yaml.safe_load_all(f)) + + postgres_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert postgres_deployment is not None + assert postgres_deployment["metadata"]["name"] == "postgres" + assert postgres_deployment["spec"]["template"]["spec"]["containers"][0]["image"] == "postgres:17" + + # Parse and validate Redis + with open(redis_file) as f: + docs = list(yaml.safe_load_all(f)) + + redis_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert redis_deployment is not None + assert redis_deployment["metadata"]["name"] == "redis" + + # Verify gateway has database environment variables in Secret + gateway_file = output_dir / "gateway-deployment.yaml" + with open(gateway_file) as f: + docs = list(yaml.safe_load_all(f)) + + # Find the Secret containing environment variables + secret = next((d for d in docs if d.get("kind") == "Secret" and d["metadata"]["name"] == "mcpgateway-env"), None) + assert secret is not None + assert "stringData" in secret + + string_data = secret["stringData"] + + # Check DATABASE_URL is set + assert "DATABASE_URL" in string_data + assert "postgresql://" in string_data["DATABASE_URL"] + assert "testuser:testpass" in string_data["DATABASE_URL"] + + # Check REDIS_URL is set + assert "REDIS_URL" in string_data + assert "redis://redis:6379" in string_data["REDIS_URL"] + + # Verify deployment references the Secret via envFrom + gateway_deployment = next((d for d in docs if d.get("kind") == "Deployment"), None) + assert gateway_deployment is not None + env_from = gateway_deployment["spec"]["template"]["spec"]["containers"][0]["envFrom"] + assert any(ref.get("secretRef", {}).get("name") == "mcpgateway-env" for ref in env_from) + + +class TestGenerateComposeManifests: + """Test generate_compose_manifests function with real template rendering.""" + + def test_generate_compose_gateway_only(self, tmp_path): + """Test generating Docker Compose manifest for gateway only.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose", "project_name": "test-mcp"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Verify compose file was created + compose_file = output_dir / "docker-compose.yaml" + assert compose_file.exists() + + # Parse and validate + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + assert "services" in compose_data + assert "mcpgateway" in compose_data["services"] + + gateway = compose_data["services"]["mcpgateway"] + assert gateway["image"] == "mcpgateway:latest" + assert gateway["ports"] == ["4444:4444"] + + def test_generate_compose_with_plugins(self, tmp_path): + """Test generating Docker Compose manifest with plugins.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose", "project_name": "mcp-stack"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "Plugin1", + "image": "plugin1:v1", + "port": 8000, + "expose_port": True, + "host_port": 8000, + "mtls_enabled": False, + }, + { + "name": "Plugin2", + "image": "plugin2:v1", + "port": 8001, + "expose_port": False, + "mtls_enabled": False, + }, + ], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify plugins are in services + assert "plugin1" in compose_data["services"] + assert "plugin2" in compose_data["services"] + + plugin1 = compose_data["services"]["plugin1"] + assert plugin1["image"] == "plugin1:v1" + assert "8000:8000" in plugin1["ports"] # Exposed + + plugin2 = compose_data["services"]["plugin2"] + assert plugin2["image"] == "plugin2:v1" + # Plugin2 should not have host port mapping since expose_port is False + + def test_generate_compose_with_mtls(self, tmp_path): + """Test generating Docker Compose manifest with mTLS certificates.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create fake certificate structure + certs_dir = tmp_path / "certs" / "mcp" + ca_dir = certs_dir / "ca" + gateway_dir = certs_dir / "gateway" + plugin_dir = certs_dir / "plugins" / "SecurePlugin" + + ca_dir.mkdir(parents=True) + gateway_dir.mkdir(parents=True) + plugin_dir.mkdir(parents=True) + + (ca_dir / "ca.crt").write_text("fake-ca") + (gateway_dir / "client.crt").write_text("fake-cert") + (gateway_dir / "client.key").write_text("fake-key") + (plugin_dir / "server.crt").write_text("fake-plugin-cert") + (plugin_dir / "server.key").write_text("fake-plugin-key") + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "host_port": 4444, + "mtls_enabled": True, + }, + "plugins": [ + { + "name": "SecurePlugin", + "image": "secure:v1", + "port": 8000, + "mtls_enabled": True, + } + ], + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify gateway has certificate volumes + gateway = compose_data["services"]["mcpgateway"] + assert "volumes" in gateway + # Should have volume mounts for certificates + volumes = gateway["volumes"] + assert any("certs" in str(v) or "ca.crt" in str(v) for v in volumes) + + # Verify plugin has certificate volumes + plugin = compose_data["services"]["secureplugin"] + assert "volumes" in plugin + + def test_generate_compose_with_env_files(self, tmp_path): + """Test generating Docker Compose manifest with environment files.""" + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + # Create env files + deploy_dir = tmp_path / "deploy" + env_dir = deploy_dir / "env" + env_dir.mkdir(parents=True) + (env_dir / ".env.gateway").write_text("GATEWAY_VAR=value1\n") + (env_dir / ".env.TestPlugin").write_text("PLUGIN_VAR=value2\n") + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [ + { + "name": "TestPlugin", + "image": "test:v1", + "port": 8000, + "mtls_enabled": False, + } + ], + } + + with patch("mcpgateway.tools.builder.common.get_deploy_dir", return_value=deploy_dir): + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify env_file is set + gateway = compose_data["services"]["mcpgateway"] + assert "env_file" in gateway + + plugin = compose_data["services"]["testplugin"] + assert "env_file" in plugin + + def test_generate_compose_with_infrastructure(self, tmp_path): + """Test generating Docker Compose manifest with PostgreSQL and Redis. + + Note: Currently the template uses hardcoded infrastructure images/config. + Infrastructure customization is not yet implemented for Docker Compose. + """ + output_dir = tmp_path / "manifests" + output_dir.mkdir() + + config = { + "deployment": {"type": "compose"}, + "gateway": { + "image": "mcpgateway:latest", + "port": 4444, + "mtls_enabled": False, + }, + "plugins": [], + "infrastructure": { + "postgres": { + "enabled": True, + "image": "postgres:17", + "database": "mcpdb", + "user": "mcpuser", + "password": "secret123", + }, + "redis": { + "enabled": True, + "image": "redis:7-alpine", + }, + }, + } + + with patch("mcpgateway.tools.builder.common.Path.cwd", return_value=tmp_path): + generate_compose_manifests(config, output_dir) + + # Parse and validate + compose_file = output_dir / "docker-compose.yaml" + with open(compose_file) as f: + compose_data = yaml.safe_load(f) + + # Verify PostgreSQL service exists + # Note: Template uses hardcoded "postgres:17" and "mcp" database + assert "postgres" in compose_data["services"] + postgres = compose_data["services"]["postgres"] + assert postgres["image"] == "postgres:17" # Hardcoded in template + assert "environment" in postgres + + # Verify database name is "mcp" (hardcoded default, not "mcpdb" from config) + env = postgres["environment"] + if isinstance(env, list): + assert any("POSTGRES_DB=mcp" in str(e) for e in env) + else: + assert env["POSTGRES_DB"] == "mcp" + + # Verify Redis service exists + # Note: Template uses hardcoded "redis:latest" + assert "redis" in compose_data["services"] + redis = compose_data["services"]["redis"] + assert redis["image"] == "redis:latest" # Hardcoded in template + + # Verify gateway has database environment variables + gateway = compose_data["services"]["mcpgateway"] + assert "environment" in gateway + env = gateway["environment"] + + # Should have DATABASE_URL with default values + if isinstance(env, list): + db_url = next((e for e in env if "DATABASE_URL" in str(e)), None) + else: + db_url = env.get("DATABASE_URL") + assert db_url is not None + assert "postgresql://" in str(db_url) diff --git a/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py b/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py new file mode 100644 index 000000000..bc0f8ee87 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_dagger_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for Dagger-based MCP Stack deployment. + +These tests are skipped if Dagger is not installed. +""" + +# Standard +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock, Mock, patch + +# Third-Party +import pytest + +# Check if dagger is available +try: + import dagger + + DAGGER_AVAILABLE = True +except ImportError: + DAGGER_AVAILABLE = False + +# Skip all tests in this module if Dagger is not available +pytestmark = pytest.mark.skipif(not DAGGER_AVAILABLE, reason="Dagger not installed") + +# Conditional import to avoid errors when Dagger is not installed +if DAGGER_AVAILABLE: + # First-Party + from mcpgateway.tools.builder.dagger_deploy import MCPStackDagger +else: + # Create a dummy class to avoid NameError in decorators + MCPStackDagger = type("MCPStackDagger", (), {}) + + +@pytest.fixture +def mock_dagger_connection(tmp_path): + """Fixture to mock Dagger connection and dag.""" + with patch("mcpgateway.tools.builder.dagger_deploy.dagger.connection") as mock_conn: + with patch("mcpgateway.tools.builder.dagger_deploy.dag") as mock_dag: + with patch("mcpgateway.tools.builder.dagger_deploy.Path.cwd") as mock_cwd: + # Mock Path.cwd() to return a valid temporary directory + mock_cwd.return_value = tmp_path + + # Mock the async context manager + mock_conn_ctx = AsyncMock() + mock_conn.return_value = mock_conn_ctx + mock_conn_ctx.__aenter__.return_value = None + mock_conn_ctx.__aexit__.return_value = None + + # Setup dag mocks (use regular Mock for synchronous Dagger API) + mock_git = Mock() + mock_tree = Mock() + mock_container = Mock() + mock_container.export_image = AsyncMock() # Only export_image is async + mock_host = Mock() + mock_dir = Mock() + mock_dir.export = AsyncMock() # export is async + + # Set up the method chain for git operations + mock_dag.git.return_value = mock_git + mock_git.branch.return_value = mock_git + mock_git.tree.return_value = mock_tree + mock_tree.docker_build.return_value = mock_container + + # Set up container operations + mock_dag.container.return_value = mock_container + mock_container.from_.return_value = mock_container + mock_container.with_exec.return_value = mock_container + mock_container.with_mounted_directory.return_value = mock_container + mock_container.with_workdir.return_value = mock_container + mock_container.directory.return_value = mock_dir + + # Set up host operations + mock_dag.host.return_value = mock_host + mock_host.directory.return_value = mock_dir + + yield {"connection": mock_conn, "dag": mock_dag, "container": mock_container} + + +class TestMCPStackDaggerInit: + """Test MCPStackDagger initialization.""" + + def test_init_default(self): + """Test default initialization.""" + stack = MCPStackDagger() + assert stack.verbose is False + + def test_init_verbose(self): + """Test initialization with verbose flag.""" + stack = MCPStackDagger(verbose=True) + assert stack.verbose is True + + +class TestMCPStackDaggerBuild: + """Test MCPStackDagger build method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_gateway_only(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building gateway container with Dagger.""" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git", "ref": "main"}, + "plugins": [], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml") + + mock_load.assert_called_once_with("test-config.yaml") + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_plugins_only(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building only plugins.""" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"} + ], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml", plugins_only=True) + + mock_load.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_specific_plugins(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building specific plugins only.""" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"}, + {"name": "Plugin2", "repo": "https://github.com/test/plugin2.git"}, + ], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.build("test-config.yaml", specific_plugins=["Plugin1"]) + + mock_load.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_build_no_plugins(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test building when no plugins are defined.""" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + # Should not raise error + await stack.build("test-config.yaml", plugins_only=True) + + +class TestMCPStackDaggerGenerateCertificates: + """Test MCPStackDagger generate_certificates method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @pytest.mark.asyncio + async def test_generate_certificates(self, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test certificate generation with Dagger.""" + mock_load.return_value = { + "plugins": [ + {"name": "Plugin1"}, + {"name": "Plugin2"}, + ] + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.generate_certificates("test-config.yaml") + + mock_load.assert_called_once() + + +class TestMCPStackDaggerDeploy: + """Test MCPStackDagger deploy method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "build") + @patch.object(MCPStackDagger, "generate_certificates") + @patch.object(MCPStackDagger, "generate_manifests") + @patch.object(MCPStackDagger, "_deploy_compose") + @pytest.mark.asyncio + async def test_deploy_compose_full( + self, mock_deploy, mock_gen_manifests, mock_certs, mock_build, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path + ): + """Test full Docker Compose deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"repo": "https://github.com/test/gateway.git", "mtls_enabled": True}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml") + + mock_build.assert_called_once() + mock_certs.assert_called_once() + mock_gen_manifests.assert_called_once() + mock_deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "generate_manifests") + @pytest.mark.asyncio + async def test_deploy_dry_run(self, mock_gen_manifests, mock_load, mock_dagger_connection, tmp_path): + """Test dry-run deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True, skip_certs=True) + + mock_gen_manifests.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "generate_manifests") + @patch.object(MCPStackDagger, "_deploy_kubernetes") + @pytest.mark.asyncio + async def test_deploy_kubernetes(self, mock_deploy, mock_gen_manifests, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": False}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.deploy("test-config.yaml", skip_build=True, skip_certs=True) + + mock_deploy.assert_called_once() + + +class TestMCPStackDaggerVerify: + """Test MCPStackDagger verify method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_verify_kubernetes") + @pytest.mark.asyncio + async def test_verify_kubernetes(self, mock_verify_kubernetes, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment verification with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"} + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.verify("test-config.yaml") + + mock_verify_kubernetes.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_verify_compose") + @pytest.mark.asyncio + async def test_verify_compose(self, mock_verify_compose, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Docker Compose deployment verification with Dagger.""" + mock_load.return_value = {"deployment": {"type": "compose"}} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.verify("test-config.yaml") + + mock_verify_compose.assert_called_once() + + +class TestMCPStackDaggerDestroy: + """Test MCPStackDagger destroy method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_destroy_kubernetes") + @pytest.mark.asyncio + async def test_destroy_kubernetes(self, mock_destroy_kubernetes, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Kubernetes deployment destruction with Dagger.""" + mock_load.return_value = {"deployment": {"type": "kubernetes"}} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.destroy("test-config.yaml") + + mock_destroy_kubernetes.assert_called_once() + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch.object(MCPStackDagger, "_destroy_compose") + @pytest.mark.asyncio + async def test_destroy_compose(self, mock_destroy_compose, mock_load, mock_get_deploy, mock_dagger_connection, tmp_path): + """Test Docker Compose deployment destruction with Dagger.""" + mock_load.return_value = {"deployment": {"type": "compose"}} + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + await stack.destroy("test-config.yaml") + + mock_destroy_compose.assert_called_once() + + +class TestMCPStackDaggerGenerateManifests: + """Test MCPStackDagger generate_manifests method.""" + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_kubernetes_manifests") + def test_generate_manifests_kubernetes( + self, mock_k8s_gen, mock_plugin_gen, mock_load, tmp_path + ): + """Test generating Kubernetes manifests with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackDagger() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_k8s_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.dagger_deploy.generate_compose_manifests") + def test_generate_manifests_compose( + self, mock_compose_gen, mock_plugin_gen, mock_load, tmp_path + ): + """Test generating Docker Compose manifests with Dagger.""" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackDagger() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_compose_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.dagger_deploy.get_deploy_dir") + @patch("mcpgateway.tools.builder.dagger_deploy.load_config") + def test_generate_manifests_invalid_type(self, mock_load, mock_get_deploy, tmp_path): + """Test generating manifests with invalid deployment type.""" + mock_load.return_value = { + "deployment": {"type": "invalid"}, + "gateway": {"image": "mcpgateway:latest"}, + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackDagger() + with pytest.raises(ValueError, match="Unsupported deployment type"): + stack.generate_manifests("test-config.yaml") + + +class TestMCPStackDaggerBuildComponent: + """Test MCPStackDagger _build_component_with_dagger method.""" + + @pytest.mark.asyncio + async def test_build_component_basic(self, mock_dagger_connection, tmp_path): + """Test basic component build with Dagger.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "containerfile": "Containerfile", + "image": "test-component:latest", + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test-component") + + # Verify Dagger operations were called (using mocks from fixture) + mock_dag = mock_dagger_connection["dag"] + mock_dag.git.assert_called_once() + + # Get the mock git object + mock_git = mock_dag.git.return_value + mock_git.branch.assert_called_with("main") + + # Get the mock tree object + mock_tree = mock_git.tree.return_value + mock_tree.docker_build.assert_called_once() + + @pytest.mark.asyncio + async def test_build_component_with_target(self, mock_dagger_connection, tmp_path): + """Test component build with multi-stage target.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "image": "test:latest", + "target": "production", + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test") + + # Verify docker_build was called with target parameter + mock_dag = mock_dagger_connection["dag"] + mock_git = mock_dag.git.return_value + mock_tree = mock_git.tree.return_value + call_args = mock_tree.docker_build.call_args + assert "target" in call_args[1] or call_args[0] + + @pytest.mark.asyncio + async def test_build_component_with_env_vars(self, mock_dagger_connection, tmp_path): + """Test component build with environment variables.""" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "image": "test:latest", + "env_vars": {"BUILD_ENV": "production", "VERSION": "1.0"}, + } + + stack = MCPStackDagger() + await stack._build_component_with_dagger(component, "test") + + # Verify docker_build was called + mock_dag = mock_dagger_connection["dag"] + mock_git = mock_dag.git.return_value + mock_tree = mock_git.tree.return_value + mock_tree.docker_build.assert_called_once() diff --git a/tests/unit/mcpgateway/tools/builder/test_python_deploy.py b/tests/unit/mcpgateway/tools/builder/test_python_deploy.py new file mode 100644 index 000000000..3e8981268 --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_python_deploy.py @@ -0,0 +1,468 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_python_deploy.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for plain Python MCP Stack deployment. +""" + +# Standard +from pathlib import Path +import subprocess +from unittest.mock import MagicMock, Mock, patch, call + +# Third-Party +import pytest + +# First-Party +from mcpgateway.tools.builder.python_deploy import MCPStackPython + + +class TestMCPStackPython: + """Test MCPStackPython deployment class.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_with_docker(self, mock_which): + """Test initialization with Docker runtime.""" + mock_which.return_value = "/usr/bin/docker" + stack = MCPStackPython() + assert stack.container_runtime == "docker" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_with_podman(self, mock_which): + """Test initialization with Podman runtime.""" + + def which_side_effect(cmd): + if cmd == "docker": + return None + elif cmd == "podman": + return "/usr/bin/podman" + return None + + mock_which.side_effect = which_side_effect + stack = MCPStackPython() + assert stack.container_runtime == "podman" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_init_no_runtime(self, mock_which): + """Test initialization when no container runtime available.""" + mock_which.return_value = None + with pytest.raises(RuntimeError, match="No container runtime found"): + MCPStackPython() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_build_component") + @pytest.mark.asyncio + async def test_build_gateway(self, mock_build, mock_load, mock_which): + """Test building gateway container.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git", "ref": "main"}, + "plugins": [], + } + + stack = MCPStackPython() + await stack.build("test-config.yaml") + + mock_build.assert_called_once() + assert mock_build.call_args[0][1] == "gateway" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_build_plugins_only(self, mock_load, mock_which): + """Test building only plugins.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"repo": "https://github.com/test/gateway.git"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"} + ], + } + + stack = MCPStackPython() + with patch.object(stack, "_build_component") as mock_build: + await stack.build("test-config.yaml", plugins_only=True) + + # Gateway should not be built + calls = [call_args[0][1] for call_args in mock_build.call_args_list] + assert "gateway" not in calls + assert "Plugin1" in calls + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_build_component") + @pytest.mark.asyncio + async def test_build_specific_plugins(self, mock_build, mock_load, mock_which): + """Test building specific plugins only.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [ + {"name": "Plugin1", "repo": "https://github.com/test/plugin1.git"}, + {"name": "Plugin2", "repo": "https://github.com/test/plugin2.git"}, + {"name": "Plugin3", "repo": "https://github.com/test/plugin3.git"}, + ], + } + + stack = MCPStackPython() + await stack.build("test-config.yaml", specific_plugins=["Plugin1", "Plugin3"]) + + # Should only build Plugin1 and Plugin3 + calls = [call_args[0][1] for call_args in mock_build.call_args_list] + assert "Plugin1" in calls + assert "Plugin3" in calls + assert "Plugin2" not in calls + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_build_no_plugins(self, mock_load, mock_which): + """Test building when no plugins are defined.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + # Should not raise error + await stack.build("test-config.yaml", plugins_only=True) + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.shutil.which", return_value="/usr/bin/make") + @patch.object(MCPStackPython, "_run_command") + @pytest.mark.asyncio + async def test_generate_certificates(self, mock_run, mock_make, mock_load, mock_which_runtime): + """Test certificate generation.""" + mock_which_runtime.return_value = "/usr/bin/docker" + mock_load.return_value = { + "plugins": [ + {"name": "Plugin1"}, + {"name": "Plugin2"}, + ] + } + + stack = MCPStackPython() + await stack.generate_certificates("test-config.yaml") + + # Should call make commands for CA, gateway, and each plugin + assert mock_run.call_count == 4 # CA + gateway + 2 plugins + + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @pytest.mark.asyncio + async def test_generate_certificates_make_not_found(self, mock_load): + """Test certificate generation when make is not available.""" + mock_load.return_value = {"plugins": []} + + # Patch shutil.which to return docker for __init__, then None for make check + with patch("mcpgateway.tools.builder.python_deploy.shutil.which") as mock_which: + # First call returns docker (for __init__), subsequent calls return None (for make check) + mock_which.side_effect = ["/usr/bin/docker", None] + + stack = MCPStackPython(verbose=True) + + with pytest.raises(RuntimeError, match="'make' command not found"): + await stack.generate_certificates("test-config.yaml") + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "build") + @patch.object(MCPStackPython, "generate_certificates") + @patch.object(MCPStackPython, "generate_manifests") + @patch.object(MCPStackPython, "_deploy_compose") + @pytest.mark.asyncio + async def test_deploy_compose( + self, mock_deploy, mock_gen_manifests, mock_certs, mock_build, mock_load, mock_which + ): + """Test full compose deployment.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose", "project_name": "test"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": True}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + await stack.deploy("test-config.yaml") + + mock_build.assert_called_once() + mock_certs.assert_called_once() + mock_gen_manifests.assert_called_once() + mock_deploy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "build") + @patch.object(MCPStackPython, "generate_manifests") + @pytest.mark.asyncio + async def test_deploy_dry_run(self, mock_gen_manifests, mock_build, mock_load, mock_which): + """Test dry-run deployment.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True, skip_certs=True) + + mock_gen_manifests.assert_called_once() + # Should not call actual deployment + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "generate_manifests") + @pytest.mark.asyncio + async def test_deploy_skip_certs_mtls_disabled(self, mock_gen_manifests, mock_load, mock_which): + """Test deployment with mTLS disabled.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest", "mtls_enabled": False}, + "plugins": [], + } + mock_gen_manifests.return_value = Path("/tmp/manifests") + + stack = MCPStackPython() + with patch.object(stack, "generate_certificates") as mock_certs: + await stack.deploy("test-config.yaml", dry_run=True, skip_build=True) + + # Certificates should not be generated + mock_certs.assert_not_called() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_verify_kubernetes") + @pytest.mark.asyncio + async def test_verify_kubernetes(self, mock_verify, mock_load, mock_which): + """Test Kubernetes deployment verification.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"} + } + + stack = MCPStackPython() + await stack.verify("test-config.yaml") + + mock_verify.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_verify_compose") + @pytest.mark.asyncio + async def test_verify_compose(self, mock_verify, mock_load, mock_which): + """Test Docker Compose deployment verification.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "compose"}} + + stack = MCPStackPython() + await stack.verify("test-config.yaml") + + mock_verify.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_destroy_kubernetes") + @pytest.mark.asyncio + async def test_destroy_kubernetes(self, mock_destroy, mock_load, mock_which): + """Test Kubernetes deployment destruction.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "kubernetes"}} + + stack = MCPStackPython() + await stack.destroy("test-config.yaml") + + mock_destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch.object(MCPStackPython, "_destroy_compose") + @pytest.mark.asyncio + async def test_destroy_compose(self, mock_destroy, mock_load, mock_which): + """Test Docker Compose deployment destruction.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = {"deployment": {"type": "compose"}} + + stack = MCPStackPython() + await stack.destroy("test-config.yaml") + + mock_destroy.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_kubernetes_manifests") + def test_generate_manifests_kubernetes( + self, mock_k8s_gen, mock_plugin_gen, mock_load, mock_which, tmp_path + ): + """Test generating Kubernetes manifests.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "kubernetes", "namespace": "test-ns"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_k8s_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_plugin_config") + @patch("mcpgateway.tools.builder.python_deploy.generate_compose_manifests") + def test_generate_manifests_compose( + self, mock_compose_gen, mock_plugin_gen, mock_load, mock_which, tmp_path + ): + """Test generating Docker Compose manifests.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "compose"}, + "gateway": {"image": "mcpgateway:latest"}, + "plugins": [], + } + + stack = MCPStackPython() + result = stack.generate_manifests("test-config.yaml", output_dir=str(tmp_path)) + + mock_plugin_gen.assert_called_once() + mock_compose_gen.assert_called_once() + assert result == tmp_path + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.load_config") + @patch("mcpgateway.tools.builder.python_deploy.get_deploy_dir") + def test_generate_manifests_invalid_type(self, mock_get_deploy, mock_load, mock_which, tmp_path): + """Test generating manifests with invalid deployment type.""" + mock_which.return_value = "/usr/bin/docker" + mock_load.return_value = { + "deployment": {"type": "invalid"}, + "gateway": {"image": "mcpgateway:latest"}, + } + mock_get_deploy.return_value = tmp_path / "deploy" + + stack = MCPStackPython() + with pytest.raises(ValueError, match="Unsupported deployment type"): + stack.generate_manifests("test-config.yaml") + + +class TestBuildComponent: + """Test _build_component method.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch.object(MCPStackPython, "_run_command") + def test_build_component_clone_new(self, mock_run, mock_which, tmp_path): + """Test building component with new git clone.""" + mock_which.return_value = "/usr/bin/docker" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "context": ".", + "image": "test-component:latest", + } + + # Create Containerfile in expected location + build_dir = tmp_path / "build" / "test-component" + build_dir.mkdir(parents=True) + (build_dir / "Containerfile").write_text("FROM alpine\n") + + stack = MCPStackPython() + + with patch("mcpgateway.tools.builder.python_deploy.Path") as mock_path_class: + mock_path_class.return_value = tmp_path / "build" / "test-component" + # Mock the path checks + with patch.object(Path, "exists", return_value=True): + with patch.object(Path, "__truediv__", return_value=build_dir / "Containerfile"): + stack._build_component(component, "test-component") + + # Verify git clone was called + clone_calls = [c for c in mock_run.call_args_list if "git" in str(c) and "clone" in str(c)] + assert len(clone_calls) > 0 + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + def test_build_component_no_repo(self, mock_which): + """Test building component without repo field.""" + mock_which.return_value = "/usr/bin/docker" + component = {"image": "test:latest"} + + stack = MCPStackPython() + with pytest.raises(ValueError, match="has no 'repo' field"): + stack._build_component(component, "test-component") + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch.object(MCPStackPython, "_run_command") + def test_build_component_with_target(self, mock_run, mock_which, tmp_path): + """Test building component with multi-stage target.""" + mock_which.return_value = "/usr/bin/docker" + component = { + "repo": "https://github.com/test/component.git", + "ref": "main", + "image": "test:latest", + "target": "production", + } + + build_dir = tmp_path / "build" / "test" + build_dir.mkdir(parents=True) + (build_dir / "Containerfile").write_text("FROM alpine\n") + + stack = MCPStackPython() + + with patch("mcpgateway.tools.builder.python_deploy.Path") as mock_path_class: + mock_path_class.return_value = build_dir + with patch.object(Path, "exists", return_value=True): + with patch.object(Path, "__truediv__", return_value=build_dir / "Containerfile"): + stack._build_component(component, "test") + + # Verify --target was included in build command + build_calls = [c for c in mock_run.call_args_list if "docker" in str(c) and "build" in str(c)] + assert len(build_calls) > 0 + + +class TestRunCommand: + """Test _run_command method.""" + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_success(self, mock_run, mock_which): + """Test successful command execution.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0, stdout="Success", stderr="") + + stack = MCPStackPython() + result = stack._run_command(["echo", "test"]) + + assert result.returncode == 0 + mock_run.assert_called_once() + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_failure(self, mock_run, mock_which): + """Test command execution failure.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.side_effect = subprocess.CalledProcessError(1, "cmd") + + stack = MCPStackPython() + with pytest.raises(subprocess.CalledProcessError): + stack._run_command(["false"]) + + @patch("mcpgateway.tools.builder.python_deploy.shutil.which") + @patch("mcpgateway.tools.builder.python_deploy.subprocess.run") + def test_run_command_with_cwd(self, mock_run, mock_which, tmp_path): + """Test command execution with working directory.""" + mock_which.return_value = "/usr/bin/docker" + mock_run.return_value = Mock(returncode=0) + + stack = MCPStackPython() + stack._run_command(["ls"], cwd=tmp_path) + + assert mock_run.call_args[1]["cwd"] == tmp_path diff --git a/tests/unit/mcpgateway/tools/builder/test_schema.py b/tests/unit/mcpgateway/tools/builder/test_schema.py new file mode 100644 index 000000000..86a66e3bc --- /dev/null +++ b/tests/unit/mcpgateway/tools/builder/test_schema.py @@ -0,0 +1,330 @@ +# -*- coding: utf-8 -*- +"""Location: ./tests/unit/mcpgateway/tools/builder/test_schema.py +Copyright 2025 +SPDX-License-Identifier: Apache-2.0 +Authors: Teryl Taylor + +Unit tests for builder schema validation (Pydantic models). +""" + +# Third-Party +import pytest +from pydantic import ValidationError + +# First-Party +from mcpgateway.tools.builder.schema import ( + BuildableConfig, + CertificatesConfig, + DeploymentConfig, + GatewayConfig, + InfrastructureConfig, + MCPStackConfig, + PluginConfig, + PostgresConfig, + RedisConfig, +) + + +class TestDeploymentConfig: + """Test DeploymentConfig validation.""" + + def test_valid_kubernetes_deployment(self): + """Test valid Kubernetes deployment configuration.""" + config = DeploymentConfig(type="kubernetes", namespace="test-ns") + assert config.type == "kubernetes" + assert config.namespace == "test-ns" + assert config.project_name is None + + def test_valid_compose_deployment(self): + """Test valid Docker Compose deployment configuration.""" + config = DeploymentConfig(type="compose", project_name="test-project") + assert config.type == "compose" + assert config.project_name == "test-project" + assert config.namespace is None + + def test_invalid_deployment_type(self): + """Test invalid deployment type.""" + with pytest.raises(ValidationError): + DeploymentConfig(type="invalid") + + +class TestGatewayConfig: + """Test GatewayConfig validation.""" + + def test_gateway_with_image(self): + """Test gateway config with pre-built image.""" + config = GatewayConfig(image="mcpgateway:latest", port=4444) + assert config.image == "mcpgateway:latest" + assert config.port == 4444 + assert config.repo is None + + def test_gateway_with_repo(self): + """Test gateway config with repository build.""" + config = GatewayConfig( + repo="https://github.com/org/repo.git", + ref="main", + context=".", + port=4444 + ) + assert config.repo == "https://github.com/org/repo.git" + assert config.ref == "main" + assert config.image is None + + def test_gateway_without_image_or_repo(self): + """Test that gateway requires either image or repo.""" + with pytest.raises(ValueError, match="must specify either 'image' or 'repo'"): + GatewayConfig(port=4444) + + def test_gateway_defaults(self): + """Test gateway default values.""" + config = GatewayConfig(image="test:latest") + assert config.port == 4444 + assert config.mtls_enabled is True + assert config.ref == "main" + assert config.context == "." + assert config.containerfile == "Containerfile" + + +class TestPluginConfig: + """Test PluginConfig validation.""" + + def test_plugin_with_image(self): + """Test plugin config with pre-built image.""" + config = PluginConfig(name="TestPlugin", image="test:latest") + assert config.name == "TestPlugin" + assert config.image == "test:latest" + assert config.repo is None + + def test_plugin_with_repo(self): + """Test plugin config with repository build.""" + config = PluginConfig( + name="TestPlugin", + repo="https://github.com/org/plugin.git", + ref="v1.0.0", + context="plugins/test" + ) + assert config.name == "TestPlugin" + assert config.repo == "https://github.com/org/plugin.git" + assert config.ref == "v1.0.0" + assert config.context == "plugins/test" + + def test_plugin_without_name(self): + """Test that plugin requires name.""" + with pytest.raises(ValidationError): + PluginConfig(image="test:latest") + + def test_plugin_empty_name(self): + """Test that plugin name cannot be empty.""" + with pytest.raises(ValidationError, match="Plugin name cannot be empty"): + PluginConfig(name="", image="test:latest") + + def test_plugin_whitespace_name(self): + """Test that plugin name cannot be whitespace only.""" + with pytest.raises(ValidationError, match="Plugin name cannot be empty"): + PluginConfig(name=" ", image="test:latest") + + def test_plugin_defaults(self): + """Test plugin default values.""" + config = PluginConfig(name="TestPlugin", image="test:latest") + assert config.port == 8000 + assert config.expose_port is False + assert config.mtls_enabled is True + assert config.plugin_overrides == {} + + def test_plugin_overrides(self): + """Test plugin with overrides.""" + config = PluginConfig( + name="TestPlugin", + image="test:latest", + plugin_overrides={ + "priority": 10, + "mode": "enforce", + "tags": ["security", "filter"] + } + ) + assert config.plugin_overrides["priority"] == 10 + assert config.plugin_overrides["mode"] == "enforce" + assert config.plugin_overrides["tags"] == ["security", "filter"] + + +class TestCertificatesConfig: + """Test CertificatesConfig validation.""" + + def test_certificates_defaults(self): + """Test certificates default values.""" + config = CertificatesConfig() + assert config.validity_days == 825 + assert config.auto_generate is True + assert config.ca_path == "./certs/mcp/ca" + assert config.gateway_path == "./certs/mcp/gateway" + assert config.plugins_path == "./certs/mcp/plugins" + + def test_certificates_custom_values(self): + """Test certificates with custom values.""" + config = CertificatesConfig( + validity_days=365, + auto_generate=False, + ca_path="/custom/ca", + gateway_path="/custom/gateway", + plugins_path="/custom/plugins" + ) + assert config.validity_days == 365 + assert config.auto_generate is False + assert config.ca_path == "/custom/ca" + + +class TestInfrastructureConfig: + """Test InfrastructureConfig validation.""" + + def test_postgres_defaults(self): + """Test PostgreSQL default configuration.""" + config = PostgresConfig() + assert config.enabled is True + assert config.image == "postgres:17" + assert config.database == "mcp" + assert config.user == "postgres" + assert config.password == "mysecretpassword" + assert config.storage_size == "10Gi" + + def test_postgres_custom(self): + """Test PostgreSQL custom configuration.""" + config = PostgresConfig( + enabled=True, + image="postgres:16", + database="customdb", + user="customuser", + password="custompass", + storage_size="20Gi", + storage_class="fast-ssd" + ) + assert config.image == "postgres:16" + assert config.database == "customdb" + assert config.storage_class == "fast-ssd" + + def test_redis_defaults(self): + """Test Redis default configuration.""" + config = RedisConfig() + assert config.enabled is True + assert config.image == "redis:latest" + + def test_infrastructure_defaults(self): + """Test infrastructure with default values.""" + config = InfrastructureConfig() + assert config.postgres.enabled is True + assert config.redis.enabled is True + + +class TestMCPStackConfig: + """Test complete MCPStackConfig validation.""" + + def test_minimal_config(self): + """Test minimal valid configuration.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="compose", project_name="test"), + gateway=GatewayConfig(image="mcpgateway:latest") + ) + assert config.deployment.type == "compose" + assert config.gateway.image == "mcpgateway:latest" + assert config.plugins == [] + + def test_full_config(self): + """Test full configuration with all options.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="kubernetes", namespace="prod"), + gateway=GatewayConfig( + image="mcpgateway:latest", + port=4444, + mtls_enabled=True + ), + plugins=[ + PluginConfig(name="Plugin1", image="plugin1:latest"), + PluginConfig(name="Plugin2", image="plugin2:latest") + ], + certificates=CertificatesConfig(validity_days=365), + infrastructure=InfrastructureConfig() + ) + assert config.deployment.namespace == "prod" + assert len(config.plugins) == 2 + assert config.certificates.validity_days == 365 + + def test_duplicate_plugin_names(self): + """Test that duplicate plugin names are rejected.""" + with pytest.raises(ValidationError, match="Duplicate plugin names found"): + MCPStackConfig( + deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig(image="test:latest"), + plugins=[ + PluginConfig(name="DuplicatePlugin", image="plugin1:latest"), + PluginConfig(name="DuplicatePlugin", image="plugin2:latest") + ] + ) + + def test_unique_plugin_names(self): + """Test that unique plugin names are accepted.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig(image="test:latest"), + plugins=[ + PluginConfig(name="Plugin1", image="plugin1:latest"), + PluginConfig(name="Plugin2", image="plugin2:latest"), + PluginConfig(name="Plugin3", image="plugin3:latest") + ] + ) + assert len(config.plugins) == 3 + assert [p.name for p in config.plugins] == ["Plugin1", "Plugin2", "Plugin3"] + + def test_config_with_repo_builds(self): + """Test configuration with repository builds.""" + config = MCPStackConfig( + deployment=DeploymentConfig(type="compose"), + gateway=GatewayConfig( + repo="https://github.com/org/gateway.git", + ref="v2.0.0" + ), + plugins=[ + PluginConfig( + name="BuiltPlugin", + repo="https://github.com/org/plugin.git", + ref="main", + context="plugins/src" + ) + ] + ) + assert config.gateway.repo is not None + assert config.gateway.ref == "v2.0.0" + assert config.plugins[0].repo is not None + assert config.plugins[0].context == "plugins/src" + + +class TestBuildableConfig: + """Test BuildableConfig base class validation.""" + + def test_mtls_defaults(self): + """Test mTLS default settings.""" + config = GatewayConfig(image="test:latest") + assert config.mtls_enabled is True + + def test_mtls_disabled(self): + """Test mTLS can be disabled.""" + config = GatewayConfig(image="test:latest", mtls_enabled=False) + assert config.mtls_enabled is False + + def test_env_vars(self): + """Test environment variables.""" + config = GatewayConfig( + image="test:latest", + env_vars={"LOG_LEVEL": "DEBUG", "PORT": "4444"} + ) + assert config.env_vars["LOG_LEVEL"] == "DEBUG" + assert config.env_vars["PORT"] == "4444" + + def test_multi_stage_build(self): + """Test multi-stage build target.""" + config = PluginConfig( + name="TestPlugin", + repo="https://github.com/org/plugin.git", + containerfile="Dockerfile", + target="production" + ) + assert config.containerfile == "Dockerfile" + assert config.target == "production"