Compare commits

11 Commits

| SHA1 |
|---|
| 7cc283a850 |
| 2368a39d11 |
| 0be9ad030a |
| febc9bd5b5 |
| 2d5ae034c9 |
| 1bc11de465 |
| 575e16f2fa |
| 615b05c8d6 |
| d1cca04e76 |
| 90fd0e46f7 |
| 14a309d7d6 |
.env.example (18 lines changed)

@@ -1,9 +1,10 @@
# Server Configuration
NODE_ENV=development
PORT=3000
PORT=7123
DEBUG=false
LOG_LEVEL=info
MCP_SERVER=http://localhost:3000
MCP_SERVER=http://localhost:7123
USE_STDIO_TRANSPORT=true

# Home Assistant Configuration
HASS_HOST=http://homeassistant.local:8123
@@ -19,11 +20,12 @@ JWT_ALGORITHM=HS256
# Rate Limiting
RATE_LIMIT_WINDOW=900000
RATE_LIMIT_MAX_REQUESTS=100
RATE_LIMIT_MAX_AUTH_REQUESTS=5
RATE_LIMIT_REGULAR=100
RATE_LIMIT_WEBSOCKET=1000

# CORS Configuration
CORS_ORIGINS=http://localhost:3000,http://localhost:8123
CORS_ORIGINS=http://localhost:3000,http://localhost:8123,http://homeassistant.local:8123
CORS_METHODS=GET,POST,PUT,DELETE,OPTIONS
CORS_ALLOWED_HEADERS=Content-Type,Authorization,X-Requested-With
CORS_EXPOSED_HEADERS=
@@ -48,9 +50,9 @@ MAX_RETRIES=3
ANALYSIS_TIMEOUT=30000

# Speech Features Configuration
ENABLE_SPEECH_FEATURES=true
ENABLE_WAKE_WORD=true
ENABLE_SPEECH_TO_TEXT=true
ENABLE_SPEECH_FEATURES=false
ENABLE_WAKE_WORD=false
ENABLE_SPEECH_TO_TEXT=false
WHISPER_MODEL_PATH=/models
WHISPER_MODEL_TYPE=base

@@ -78,9 +80,9 @@ SSE_RECONNECT_TIMEOUT=5000
HOT_RELOAD=true

# Test Configuration (only needed for running tests)
TEST_HASS_HOST=http://localhost:8123
TEST_HASS_HOST=http://homeassistant.local:8123
TEST_HASS_TOKEN=test_token
TEST_HASS_SOCKET_URL=ws://localhost:8123/api/websocket
TEST_HASS_SOCKET_URL=ws://homeassistant.local:8123/api/websocket
TEST_PORT=3001

# Version
Dockerfile (75 lines changed)

@@ -4,23 +4,16 @@ FROM node:20-slim as builder
# Set working directory
WORKDIR /app

# Install bun
RUN npm install -g bun@1.0.25
# Install bun with the latest version
RUN npm install -g bun@1.0.35

# Install only the minimal dependencies needed and clean up in the same layer
# Install Python and other dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    curl \
    pulseaudio \
    alsa-utils \
    python3-full \
    python3 \
    python3-pip \
    python3-dev \
    python3-venv \
    portaudio19-dev \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean \
    && rm -rf /var/cache/apt/*
    build-essential \
    && rm -rf /var/lib/apt/lists/*

# Create and activate virtual environment
RUN python3 -m venv /opt/venv
@@ -31,24 +24,11 @@ ENV VIRTUAL_ENV="/opt/venv"
RUN /opt/venv/bin/python -m pip install --upgrade pip

# Install Python packages in virtual environment
RUN /opt/venv/bin/python -m pip install --no-cache-dir \
    numpy \
    sounddevice \
    openwakeword \
    faster-whisper \
    requests
RUN /opt/venv/bin/python -m pip install --no-cache-dir numpy scipy

# Set build-time environment variables
ENV NODE_ENV=production \
    NODE_OPTIONS="--max-old-space-size=2048" \
    BUN_INSTALL_CACHE=0

# Copy only package files first
# Copy package.json and install dependencies
COPY package.json ./

# Install dependencies with a clean slate
RUN rm -rf node_modules .bun bun.lockb && \
    bun install --no-save
RUN bun install --frozen-lockfile || bun install

# Copy source files and build
COPY src ./src
@@ -58,23 +38,18 @@ RUN bun build ./src/index.ts --target=bun --minify --outdir=./dist
# Create a smaller production image
FROM node:20-slim as runner

# Install bun in production image
RUN npm install -g bun@1.0.25
# Install bun in production image with the latest version
RUN npm install -g bun@1.0.35

# Install runtime dependencies
# Install system dependencies
RUN apt-get update && apt-get install -y --no-install-recommends \
    pulseaudio \
    alsa-utils \
    libasound2 \
    libasound2-plugins \
    python3-full \
    curl \
    python3 \
    python3-pip \
    python3-dev \
    python3-venv \
    portaudio19-dev \
    && rm -rf /var/lib/apt/lists/* \
    && apt-get clean \
    && rm -rf /var/cache/apt/*
    alsa-utils \
    pulseaudio \
    && rm -rf /var/lib/apt/lists/*

# Configure ALSA
COPY docker/speech/asound.conf /etc/asound.conf
@@ -88,19 +63,7 @@ ENV VIRTUAL_ENV="/opt/venv"
RUN /opt/venv/bin/python -m pip install --upgrade pip

# Install Python packages in virtual environment
RUN /opt/venv/bin/python -m pip install --no-cache-dir \
    numpy \
    sounddevice \
    openwakeword \
    faster-whisper \
    requests

# Set Python path to use virtual environment
ENV PYTHONPATH="/opt/venv/lib/python3.11/site-packages:$PYTHONPATH"

# Set production environment variables
ENV NODE_ENV=production \
    NODE_OPTIONS="--max-old-space-size=1024"
RUN /opt/venv/bin/python -m pip install --no-cache-dir numpy scipy

# Create a non-root user and add to audio group
RUN addgroup --system --gid 1001 nodejs && \
@@ -136,4 +99,4 @@ HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
EXPOSE ${PORT:-4000}

# Start the application with audio setup
CMD ["/bin/bash", "-c", "/app/docker/speech/setup-audio.sh & bun --smol run start"]
CMD ["/bin/bash", "-c", "/app/docker/speech/setup-audio.sh || echo 'Audio setup failed, continuing anyway' && bun --smol run fix-env.js"]
PUBLISHING.md (new file, 96 lines)

@@ -0,0 +1,96 @@
# Publishing to npm

This document outlines the steps to publish the Home Assistant MCP server to npm.

## Prerequisites

1. You need an npm account. Create one at [npmjs.com](https://www.npmjs.com/signup) if you don't have one.
2. You need to be logged in to npm on your local machine:
   ```bash
   npm login
   ```
3. You need to have all the necessary dependencies installed:
   ```bash
   npm install
   ```

## Before Publishing

1. Make sure all tests pass:
   ```bash
   npm test
   ```

2. Build all the necessary files:
   ```bash
   npm run build        # Build for Bun
   npm run build:node   # Build for Node.js
   npm run build:stdio  # Build the stdio server
   ```

3. Update the version number in `package.json` following [semantic versioning](https://semver.org/):
   - MAJOR version for incompatible API changes
   - MINOR version for new functionality in a backward-compatible manner
   - PATCH version for backward-compatible bug fixes

4. Update the CHANGELOG.md file with the changes in the new version.

## Publishing

1. Publish to npm:
   ```bash
   npm publish
   ```

   If you want to publish a beta version:
   ```bash
   npm publish --tag beta
   ```

2. Verify the package is published:
   ```bash
   npm view homeassistant-mcp
   ```

## After Publishing

1. Create a git tag for the version:
   ```bash
   git tag -a v1.0.0 -m "Version 1.0.0"
   git push origin v1.0.0
   ```

2. Create a GitHub release with the same version number and include the changelog.

## Testing the Published Package

To test the published package:

```bash
# Install globally
npm install -g homeassistant-mcp

# Run the MCP server
homeassistant-mcp

# Or use npx without installing
npx homeassistant-mcp
```

## Unpublishing

If you need to unpublish a version (only possible within 72 hours of publishing):

```bash
npm unpublish homeassistant-mcp@1.0.0
```

## Publishing a New Version

1. Update the version in package.json
2. Update CHANGELOG.md
3. Build all files
4. Run tests
5. Publish to npm
6. Create a git tag
7. Create a GitHub release
README.md (670 lines changed)

@@ -1,11 +1,246 @@
# Home Assistant Model Context Protocol (MCP)

A standardized protocol for AI assistants to interact with Home Assistant, providing a secure, typed, and extensible interface for controlling smart home devices.

## Overview

The Model Context Protocol (MCP) server acts as a bridge between AI models (like Claude, GPT, etc.) and Home Assistant, enabling AI assistants to:

- Execute commands on Home Assistant devices
- Retrieve information about the smart home
- Stream responses for long-running operations
- Validate parameters and inputs
- Provide consistent error handling

## Features

- **Modular Architecture** - Clean separation between transport, middleware, and tools
- **Typed Interface** - Fully TypeScript typed for better developer experience
- **Multiple Transports**:
  - **Standard I/O** (stdin/stdout) for CLI integration
  - **HTTP/REST API** with Server-Sent Events support for streaming
- **Middleware System** - Validation, logging, timeout, and error handling
- **Built-in Tools**:
  - Light control (brightness, color, etc.)
  - Climate control (thermostats, HVAC)
  - More to come...
- **Extensible Plugin System** - Easily add new tools and capabilities
- **Streaming Responses** - Support for long-running operations
- **Parameter Validation** - Using Zod schemas
- **Claude & Cursor Integration** - Ready-made utilities for AI assistants

## Getting Started

### Prerequisites

- Node.js 16+
- Home Assistant instance (or you can use the mock implementations for testing)

### Installation

```bash
# Clone the repository
cd /data
git clone https://git.carriere.cloud/alex/homeassistant-mcp.git

# Install dependencies
cd homeassistant-mcp

npm install -g bun
bun install

# Build the project
npm run build

```

### Running the Server

```bash
# Start with standard I/O transport (for AI assistant integration)
npm start -- --stdio

# Start with HTTP transport (for API access)
npm start -- --http

# Start with both transports
npm start -- --stdio --http
```

### Configuration

Configure the server using environment variables or a `.env` file:

```dotenv
# Server configuration
PORT=3000
NODE_ENV=development

# Execution settings
EXECUTION_TIMEOUT=30000
STREAMING_ENABLED=true

# Transport settings
USE_STDIO_TRANSPORT=true
USE_HTTP_TRANSPORT=true

# Debug and logging
DEBUG_MODE=false
DEBUG_STDIO=false
DEBUG_HTTP=false
SILENT_STARTUP=false

# CORS settings
CORS_ORIGIN=*
```
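
For illustration, a minimal sketch of how these settings might be read at startup. The variable names match the sample above; the helper itself is hypothetical and not part of the repository:

```typescript
// Hypothetical config helper, shown only to illustrate the environment variables above.
export interface ServerConfig {
  port: number;
  useStdioTransport: boolean;
  useHttpTransport: boolean;
  executionTimeout: number;
}

export function loadServerConfig(env: NodeJS.ProcessEnv = process.env): ServerConfig {
  return {
    port: Number(env.PORT ?? 3000),
    useStdioTransport: env.USE_STDIO_TRANSPORT === "true",
    useHttpTransport: env.USE_HTTP_TRANSPORT === "true",
    executionTimeout: Number(env.EXECUTION_TIMEOUT ?? 30000),
  };
}
```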

## Architecture

The MCP server is built with a layered architecture:

1. **Transport Layer** - Handles communication protocols (stdio, HTTP)
2. **Middleware Layer** - Processes requests through a pipeline (a sketch follows below)
3. **Tool Layer** - Implements specific functionality
4. **Resource Layer** - Manages stateful resources
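
To make the middleware pipeline concrete, here is a minimal sketch of how such a pipeline is typically composed. The type and function names are illustrative assumptions, not the project's actual API:

```typescript
// Illustrative sketch only; the real middleware types live in the project's source.
type ToolRequest = { method: string; params: Record<string, unknown> };
type ToolResponse = { result?: unknown; error?: { code: number; message: string } };
type Middleware = (req: ToolRequest, next: () => Promise<ToolResponse>) => Promise<ToolResponse>;

// Compose middleware around a final handler so they run in array order.
function compose(middleware: Middleware[], handler: (req: ToolRequest) => Promise<ToolResponse>) {
  return (req: ToolRequest): Promise<ToolResponse> =>
    middleware.reduceRight<() => Promise<ToolResponse>>(
      (next, mw) => () => mw(req, next),
      () => handler(req),
    )();
}

// Example middleware: log each call to stderr so stdout stays clean for JSON-RPC.
const logging: Middleware = async (req, next) => {
  console.error(`tool call: ${req.method}`);
  return next();
};
```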

### Tools

Tools are the primary way to add functionality to the MCP server. Each tool:

- Has a unique name
- Accepts typed parameters
- Returns typed results
- Can stream partial results
- Validates inputs and outputs

Example tool registration:

```typescript
import { LightsControlTool } from "./tools/homeassistant/lights.tool.js";
import { ClimateControlTool } from "./tools/homeassistant/climate.tool.js";

// Register tools
server.registerTool(new LightsControlTool());
server.registerTool(new ClimateControlTool());
```
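
For orientation, a tool of this shape could look roughly like the sketch below, using Zod for parameter validation as mentioned in the feature list. The base shape, property names, and the example entity id are assumptions for illustration, not the project's actual definitions:

```typescript
import { z } from "zod";

// Hypothetical tool sketch; the real tool base class/interface is defined in src/.
const LightParams = z.object({
  entity_id: z.string(),                              // e.g. "light.living_room" (example value)
  brightness: z.number().min(0).max(255).optional(),
});

class ExampleLightsTool {
  name = "lights_control";                            // assumed name for this sketch
  description = "Turn lights on or off and set brightness";
  parameters = LightParams;

  async execute(raw: unknown) {
    const params = this.parameters.parse(raw);        // throws on invalid input
    // ...call the Home Assistant API here...
    return { success: true, entity_id: params.entity_id };
  }
}
```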

### API

When running with HTTP transport, the server provides a JSON-RPC 2.0 API:

- `POST /api/mcp/jsonrpc` - Execute a tool
- `GET /api/mcp/stream` - Connect to SSE stream for real-time updates
- `GET /api/mcp/info` - Get server information
- `GET /health` - Health check endpoint
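
As an illustration, invoking the execute endpoint with a JSON-RPC 2.0 payload could look like this. The tool name and parameters are placeholders; the request and response shapes follow the message format documented later in this README:

```typescript
// Sketch: call a tool over the HTTP transport (assumes the server listens on localhost:3000).
const response = await fetch("http://localhost:3000/api/mcp/jsonrpc", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    jsonrpc: "2.0",
    id: "req-1",
    method: "lights_control",                          // placeholder tool name
    params: { entity_id: "light.living_room", brightness: 128 },
  }),
});

const payload = await response.json();
if (payload.error) {
  console.error(`JSON-RPC error ${payload.error.code}: ${payload.error.message}`);
} else {
  console.log(payload.result);
}
```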

## Integration with AI Models

### Claude Integration

```typescript
import { createClaudeToolDefinitions } from "./mcp/index.js";

// Generate Claude-compatible tool definitions
const claudeTools = createClaudeToolDefinitions([
  new LightsControlTool(),
  new ClimateControlTool()
]);

// Use with Claude API
const messages = [
  { role: "user", content: "Turn on the lights in the living room" }
];

const response = await claude.messages.create({
  model: "claude-3-opus-20240229",
  messages,
  tools: claudeTools
});
```

### Cursor Integration

To use the Home Assistant MCP server with Cursor, add the following to your `.cursor/config/config.json` file:

```json
{
  "mcpServers": {
    "homeassistant-mcp": {
      "command": "bash",
      "args": ["-c", "cd ${workspaceRoot} && bun run dist/index.js --stdio 2>/dev/null | grep -E '\\{\"jsonrpc\":\"2\\.0\"'"],
      "env": {
        "NODE_ENV": "development",
        "USE_STDIO_TRANSPORT": "true",
        "DEBUG_STDIO": "true"
      }
    }
  }
}
```

This configuration:
1. Runs the MCP server with stdio transport
2. Redirects all stderr output to /dev/null
3. Uses grep to filter stdout for lines containing `{"jsonrpc":"2.0"`, ensuring clean JSON-RPC output

#### Troubleshooting Cursor Integration

If you encounter a "failed to create client" error when using the MCP server with Cursor:

1. Make sure you're using the correct command and arguments in your Cursor configuration
   - The bash script approach ensures only valid JSON-RPC messages reach Cursor
   - Ensure the server is built by running `bun run build` before trying to connect

2. Ensure the server is properly outputting JSON-RPC messages to stdout:
   ```bash
   bun run dist/index.js --stdio 2>/dev/null | grep -E '\{"jsonrpc":"2\.0"' > json_only.txt
   ```
   Then examine json_only.txt to verify it contains only valid JSON-RPC messages.

3. Make sure grep is installed on your system (it should be available by default on most systems)

4. Try rebuilding the server with:
   ```bash
   bun run build
   ```

5. Enable debug mode by setting `DEBUG_STDIO=true` in the environment variables

If the issue persists, you can try:
1. Restarting Cursor
2. Clearing Cursor's cache (Help > Developer > Clear Cache and Reload)
3. Using a similar approach with Node.js:
   ```json
   {
     "command": "bash",
     "args": ["-c", "cd ${workspaceRoot} && node dist/index.js --stdio 2>/dev/null | grep -E '\\{\"jsonrpc\":\"2\\.0\"'"]
   }
   ```

## License

MIT

## Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

# MCP Server for Home Assistant 🏠🤖

[MIT License](LICENSE) [Bun](https://bun.sh) [TypeScript](https://www.typescriptlang.org)
[MIT License](LICENSE) [Bun](https://bun.sh) [TypeScript](https://www.typescriptlang.org) [Smithery](https://smithery.ai/server/@jango-blockchained/advanced-homeassistant-mcp)

## Overview 🌐

MCP (Model Context Protocol) Server is my lightweight integration tool for Home Assistant, providing a flexible interface for device management and automation. It's designed to be fast, secure, and easy to use. Built with Bun for maximum performance.

## Core Features ✨

- 🔌 Basic device control via REST API
- 📡 WebSocket/Server-Sent Events (SSE) for state updates
- 🤖 Simple automation rule management
- 🔐 JWT-based authentication
- 🔄 Standard I/O (stdio) transport for integration with Claude and other AI assistants

## Why Bun? 🚀

I chose Bun as the runtime for several key benefits:
@@ -38,66 +273,6 @@ I chose Bun as the runtime for several key benefits:
- Compatible with Express/Fastify
- Native Node.js APIs

## Core Features ✨

- 🔌 Basic device control via REST API
- 📡 WebSocket/Server-Sent Events (SSE) for state updates
- 🤖 Simple automation rule management
- 🔐 JWT-based authentication
- 🎤 Optional speech features:
  - 🗣️ Wake word detection ("hey jarvis", "ok google", "alexa")
  - 🎯 Speech-to-text using fast-whisper
  - 🌍 Multiple language support
  - 🚀 GPU acceleration support

## System Architecture 📊

```mermaid
flowchart TB
    subgraph Client["Client Applications"]
        direction TB
        Web["Web Interface"]
        Mobile["Mobile Apps"]
        Voice["Voice Control"]
    end

    subgraph MCP["MCP Server"]
        direction TB
        API["REST API"]
        WS["WebSocket/SSE"]
        Auth["Authentication"]

        subgraph Speech["Speech Processing (Optional)"]
            direction TB
            Wake["Wake Word Detection"]
            STT["Speech-to-Text"]

            subgraph STT_Options["STT Options"]
                direction LR
                Whisper["Whisper"]
                FastWhisper["Fast Whisper"]
            end

            Wake --> STT
            STT --> STT_Options
        end
    end

    subgraph HA["Home Assistant"]
        direction TB
        HASS_API["HASS API"]
        HASS_WS["HASS WebSocket"]
        Devices["Smart Devices"]
    end

    Client --> MCP
    MCP --> HA
    HA --> Devices

    style Speech fill:#f9f,stroke:#333,stroke-width:2px
    style STT_Options fill:#bbf,stroke:#333,stroke-width:1px
```

## Prerequisites 📋

- 🚀 [Bun runtime](https://bun.sh) (v1.0.26+)
@@ -135,21 +310,11 @@ NODE_ENV=production ./scripts/setup-env.sh

4. Build and launch with Docker:
   ```bash
   # Build options:
   # Standard build
   ./docker-build.sh

   # Build with speech support
   ./docker-build.sh --speech

   # Build with speech and GPU support
   ./docker-build.sh --speech --gpu

   # Launch:
   docker compose up -d

   # With speech features:
   docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d
   ```

## Docker Build Options 🐳
@@ -213,41 +378,6 @@ Files load in this order:

Later files override earlier ones.

## Speech Features Setup 🎤

### Prerequisites
1. 🐳 Docker installed and running
2. 🎮 NVIDIA GPU with CUDA (optional)
3. 💾 4GB+ RAM (8GB+ recommended)

### Configuration
1. Enable speech in `.env`:
   ```bash
   ENABLE_SPEECH_FEATURES=true
   ENABLE_WAKE_WORD=true
   ENABLE_SPEECH_TO_TEXT=true
   WHISPER_MODEL_PATH=/models
   WHISPER_MODEL_TYPE=base
   ```

2. Choose your STT engine:
   ```bash
   # For standard Whisper
   STT_ENGINE=whisper

   # For Fast Whisper (GPU recommended)
   STT_ENGINE=fast-whisper
   CUDA_VISIBLE_DEVICES=0  # Set GPU device
   ```

### Available Models 🤖
Choose based on your needs:
- `tiny.en`: Fastest, basic accuracy
- `base.en`: Good balance (recommended)
- `small.en`: Better accuracy, slower
- `medium.en`: High accuracy, resource intensive
- `large-v2`: Best accuracy, very resource intensive

## Development 💻

```bash
@@ -291,29 +421,6 @@ bun run start
- [Custom Prompts Guide](docs/prompts.md) - Create and customize AI behavior
- [Extras & Tools](docs/extras.md) - Additional utilities and advanced features

### Extra Tools 🛠️

I've included several powerful tools in the `extra/` directory to enhance your Home Assistant experience:

1. **Home Assistant Analyzer CLI** (`ha-analyzer-cli.ts`)
   - Deep automation analysis using AI models
   - Security vulnerability scanning
   - Performance optimization suggestions
   - System health metrics

2. **Speech-to-Text Example** (`speech-to-text-example.ts`)
   - Wake word detection
   - Speech-to-text transcription
   - Multiple language support
   - GPU acceleration support

3. **Claude Desktop Setup** (`claude-desktop-macos-setup.sh`)
   - Automated Claude Desktop installation for macOS
   - Environment configuration
   - MCP integration setup

See [Extras Documentation](docs/extras.md) for detailed usage instructions and examples.

## Client Integration 🔗

### Cursor Integration 🖱️
@@ -322,11 +429,12 @@ Add to `.cursor/config/config.json`:
{
  "mcpServers": {
    "homeassistant-mcp": {
      "command": "bun",
      "args": ["run", "start"],
      "cwd": "${workspaceRoot}",
      "command": "bash",
      "args": ["-c", "cd ${workspaceRoot} && bun run dist/index.js --stdio 2>/dev/null | grep -E '\\{\"jsonrpc\":\"2\\.0\"'"],
      "env": {
        "NODE_ENV": "development"
        "NODE_ENV": "development",
        "USE_STDIO_TRANSPORT": "true",
        "DEBUG_STDIO": "true"
      }
    }
  }
@@ -354,6 +462,83 @@ Windows users can use the provided script:
1. Go to `scripts` directory
2. Run `start_mcp.cmd`

## Additional Features

### Speech Features 🎤

MCP Server optionally supports speech processing capabilities:
- 🗣️ Wake word detection ("hey jarvis", "ok google", "alexa")
- 🎯 Speech-to-text using fast-whisper
- 🌍 Multiple language support
- 🚀 GPU acceleration support

#### Speech Features Setup

##### Prerequisites
1. 🐳 Docker installed and running
2. 🎮 NVIDIA GPU with CUDA (optional)
3. 💾 4GB+ RAM (8GB+ recommended)

##### Configuration
1. Enable speech in `.env`:
   ```bash
   ENABLE_SPEECH_FEATURES=true
   ENABLE_WAKE_WORD=true
   ENABLE_SPEECH_TO_TEXT=true
   WHISPER_MODEL_PATH=/models
   WHISPER_MODEL_TYPE=base
   ```

2. Choose your STT engine:
   ```bash
   # For standard Whisper
   STT_ENGINE=whisper

   # For Fast Whisper (GPU recommended)
   STT_ENGINE=fast-whisper
   CUDA_VISIBLE_DEVICES=0  # Set GPU device
   ```

##### Available Models 🤖
Choose based on your needs:
- `tiny.en`: Fastest, basic accuracy
- `base.en`: Good balance (recommended)
- `small.en`: Better accuracy, slower
- `medium.en`: High accuracy, resource intensive
- `large-v2`: Best accuracy, very resource intensive

##### Launch with Speech Features
```bash
# Build with speech support
./docker-build.sh --speech

# Launch with speech features:
docker compose -f docker-compose.yml -f docker-compose.speech.yml up -d
```

### Extra Tools 🛠️

I've included several powerful tools in the `extra/` directory to enhance your Home Assistant experience:

1. **Home Assistant Analyzer CLI** (`ha-analyzer-cli.ts`)
   - Deep automation analysis using AI models
   - Security vulnerability scanning
   - Performance optimization suggestions
   - System health metrics

2. **Speech-to-Text Example** (`speech-to-text-example.ts`)
   - Wake word detection
   - Speech-to-text transcription
   - Multiple language support
   - GPU acceleration support

3. **Claude Desktop Setup** (`claude-desktop-macos-setup.sh`)
   - Automated Claude Desktop installation for macOS
   - Environment configuration
   - MCP integration setup

See [Extras Documentation](docs/extras.md) for detailed usage instructions and examples.

## License 📄

MIT License. See [LICENSE](LICENSE) for details.
@@ -361,3 +546,222 @@ MIT License. See [LICENSE](LICENSE) for details.
## Author 👨‍💻

Created by [jango-blockchained](https://github.com/jango-blockchained)

## Running with Standard I/O Transport 📝

MCP Server supports a JSON-RPC 2.0 stdio transport mode for direct integration with AI assistants like Claude:

### MCP Stdio Features

✅ **JSON-RPC 2.0 Compatibility**: Full support for the MCP protocol standard
✅ **NPX Support**: Run directly without installation using `npx homeassistant-mcp`
✅ **Auto Configuration**: Creates necessary directories and default configuration
✅ **Cross-Platform**: Works on macOS, Linux, and Windows
✅ **Claude Desktop Integration**: Ready to use with Claude Desktop
✅ **Parameter Validation**: Automatic validation of tool parameters
✅ **Error Handling**: Standardized error codes and handling
✅ **Detailed Logging**: Logs to files without polluting stdio

### Option 1: Using NPX (Easiest)

Run the MCP server directly without installation using npx:

```bash
# Basic usage
npx homeassistant-mcp

# Or with environment variables
HASS_URL=http://your-ha-instance:8123 HASS_TOKEN=your_token npx homeassistant-mcp
```

This will:
1. Install the package temporarily
2. Automatically run in stdio mode with JSON-RPC 2.0 transport
3. Create a logs directory for logging
4. Create a default .env file if not present

Perfect for integration with Claude Desktop or other MCP clients.

#### Integrating with Claude Desktop

To use MCP with Claude Desktop:

1. Open Claude Desktop settings
2. Go to the "Advanced" tab
3. Under "MCP Server", select "Custom"
4. Enter the command: `npx homeassistant-mcp`
5. Click "Save"

Claude will now use the MCP server for Home Assistant integration, allowing you to control your smart home directly through Claude.

### Option 2: Local Installation

1. Update your `.env` file to enable stdio transport:
   ```
   USE_STDIO_TRANSPORT=true
   ```

2. Run the server using the stdio-start script:
   ```bash
   ./stdio-start.sh
   ```

   Available options:
   ```
   ./stdio-start.sh --debug    # Enable debug mode
   ./stdio-start.sh --rebuild  # Force rebuild
   ./stdio-start.sh --help     # Show help
   ```

When running in stdio mode:
- The server communicates via stdin/stdout using JSON-RPC 2.0 format
- No HTTP server is started
- Console logging is disabled to avoid polluting the stdio stream
- All logs are written to the log files in the `logs/` directory

### JSON-RPC 2.0 Message Format

#### Request Format
```json
{
  "jsonrpc": "2.0",
  "id": "unique-request-id",
  "method": "tool-name",
  "params": {
    "param1": "value1",
    "param2": "value2"
  }
}
```

#### Response Format
```json
{
  "jsonrpc": "2.0",
  "id": "unique-request-id",
  "result": {
    // Tool-specific result data
  }
}
```

#### Error Response Format
```json
{
  "jsonrpc": "2.0",
  "id": "unique-request-id",
  "error": {
    "code": -32000,
    "message": "Error message",
    "data": {}  // Optional error details
  }
}
```

#### Notification Format (Server to Client)
```json
{
  "jsonrpc": "2.0",
  "method": "notification-type",
  "params": {
    // Notification data
  }
}
```

### Supported Error Codes

| Code | Description | Meaning |
|---------|--------------------|------------------------------------------|
| -32700 | Parse error | Invalid JSON was received |
| -32600 | Invalid request | JSON is not a valid request object |
| -32601 | Method not found | Method does not exist or is unavailable |
| -32602 | Invalid params | Invalid method parameters |
| -32603 | Internal error | Internal JSON-RPC error |
| -32000 | Tool execution | Error executing the tool |
| -32001 | Validation error | Parameter validation failed |
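
To show how these codes pair with the error response format above, here is a small sketch; the helper is hypothetical and only illustrates the shape:

```typescript
// Sketch: build a JSON-RPC 2.0 error response using the codes from the table above.
type JsonRpcError = { code: number; message: string; data?: unknown };

function errorResponse(id: string | number | null, error: JsonRpcError) {
  return { jsonrpc: "2.0" as const, id, error };
}

// Example: a parameter validation failure reported as -32001 (Validation error).
const invalidParams = errorResponse("unique-request-id", {
  code: -32001,
  message: "Validation error",
  data: { field: "brightness", reason: "must be between 0 and 255" }, // illustrative detail
});
```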

### Integrating with Claude Desktop

To use this MCP server with Claude Desktop:

1. Create or edit your Claude Desktop configuration:
   ```bash
   # On macOS
   nano ~/Library/Application\ Support/Claude/claude_desktop_config.json

   # On Linux
   nano ~/.config/Claude/claude_desktop_config.json

   # On Windows
   notepad %APPDATA%\Claude\claude_desktop_config.json
   ```

2. Add the MCP server configuration:
   ```json
   {
     "mcpServers": {
       "homeassistant-mcp": {
         "command": "npx",
         "args": ["homeassistant-mcp"],
         "env": {
           "HASS_TOKEN": "your_home_assistant_token_here",
           "HASS_HOST": "http://your_home_assistant_host:8123"
         }
       }
     }
   }
   ```

3. Restart Claude Desktop.

4. In Claude, you can now use the Home Assistant MCP tools.

### JSON-RPC 2.0 Message Format

## Usage

### Using NPX (Easiest)

The simplest way to use the Home Assistant MCP server is through NPX:

```bash
# Start the server in stdio mode
npx homeassistant-mcp
```

This will automatically:
1. Start the server in stdio mode
2. Output JSON-RPC messages to stdout
3. Send log messages to stderr
4. Create a logs directory if it doesn't exist

You can redirect stderr to hide logs and only see the JSON-RPC output:

```bash
npx homeassistant-mcp 2>/dev/null
```

### Manual Installation

If you prefer to install the package globally or locally:

```bash
# Install globally
npm install -g homeassistant-mcp

# Then run
homeassistant-mcp
```

Or install locally:

```bash
# Install locally
npm install homeassistant-mcp

# Then run using npx
npx homeassistant-mcp
```

### Advanced Usage
bin/mcp-stdio.cjs (new executable file, 84 lines)

@@ -0,0 +1,84 @@
#!/usr/bin/env node

const fs = require('fs');
const path = require('path');
const dotenv = require('dotenv');

/**
 * MCP Server - Stdio Transport Mode (CommonJS)
 *
 * This is the CommonJS entry point for running the MCP server via NPX in stdio mode.
 * It will directly load the stdio-server.js file which is optimized for the CLI usage.
 */

// Set environment variable for stdio transport
process.env.USE_STDIO_TRANSPORT = 'true';

// Load environment variables from .env file (if exists)
try {
  const envPath = path.resolve(process.cwd(), '.env');
  if (fs.existsSync(envPath)) {
    dotenv.config({ path: envPath });
  } else {
    // Load .env.example if it exists
    const examplePath = path.resolve(process.cwd(), '.env.example');
    if (fs.existsSync(examplePath)) {
      dotenv.config({ path: examplePath });
    }
  }
} catch (error) {
  // Silent error handling
}

// Ensure logs directory exists
try {
  const logsDir = path.join(process.cwd(), 'logs');
  if (!fs.existsSync(logsDir)) {
    fs.mkdirSync(logsDir, { recursive: true });
  }
} catch (error) {
  // Silent error handling
}

// Try to load the server
try {
  // Check for simplified stdio server build first (preferred for CLI usage)
  const stdioServerPath = path.resolve(__dirname, '../dist/stdio-server.js');

  if (fs.existsSync(stdioServerPath)) {
    // If we're running in Node.js (not Bun), we need to handle ESM imports differently
    if (typeof Bun === 'undefined') {
      // Use dynamic import for ESM modules in CommonJS
      import(stdioServerPath).catch((err) => {
        console.error('Failed to import stdio server:', err.message);
        process.exit(1);
      });
    } else {
      // In Bun, we can directly require the module
      require(stdioServerPath);
    }
  } else {
    // Fall back to full server if available
    const fullServerPath = path.resolve(__dirname, '../dist/index.js');

    if (fs.existsSync(fullServerPath)) {
      console.warn('Warning: stdio-server.js not found, falling back to index.js');
      console.warn('For optimal CLI performance, build with "npm run build:stdio"');

      if (typeof Bun === 'undefined') {
        import(fullServerPath).catch((err) => {
          console.error('Failed to import server:', err.message);
          process.exit(1);
        });
      } else {
        require(fullServerPath);
      }
    } else {
      console.error('Error: No server implementation found. Please build the project first.');
      process.exit(1);
    }
  }
} catch (error) {
  console.error('Error starting server:', error.message);
  process.exit(1);
}
bin/mcp-stdio.js (new executable file, 41 lines)

@@ -0,0 +1,41 @@
#!/usr/bin/env node

/**
 * MCP Server - Stdio Transport Mode
 *
 * This is the entry point for running the MCP server via NPX in stdio mode.
 * It automatically configures the server to use JSON-RPC 2.0 over stdin/stdout.
 */

// Set environment variables for stdio transport
process.env.USE_STDIO_TRANSPORT = 'true';

// Import and run the MCP server from the compiled output
try {
  // First make sure required directories exist
  const fs = require('fs');
  const path = require('path');

  // Ensure logs directory exists
  const logsDir = path.join(process.cwd(), 'logs');
  if (!fs.existsSync(logsDir)) {
    console.error('Creating logs directory...');
    fs.mkdirSync(logsDir, { recursive: true });
  }

  // Get the entry module path
  const entryPath = require.resolve('../dist/index.js');

  // Print initial message to stderr
  console.error('Starting MCP server in stdio transport mode...');
  console.error('Logs will be written to the logs/ directory');
  console.error('Communication will use JSON-RPC 2.0 format via stdin/stdout');

  // Run the server
  require(entryPath);
} catch (error) {
  console.error('Failed to start MCP server:', error.message);
  console.error('If this is your first run, you may need to build the project first:');
  console.error('  npm run build');
  process.exit(1);
}
bin/npx-entry.cjs (new executable file, 150 lines)

@@ -0,0 +1,150 @@
#!/usr/bin/env node

const fs = require('fs');
const path = require('path');
const { spawn } = require('child_process');

// Set environment variable - enable stdio transport
process.env.USE_STDIO_TRANSPORT = 'true';

// Check if we're being called from Cursor (check for Cursor specific env vars)
const isCursor = process.env.CURSOR_SESSION || process.env.CURSOR_CHANNEL;

// For Cursor, we need to ensure consistent stdio handling
if (isCursor) {
  // Essential for Cursor compatibility
  process.env.LOG_LEVEL = 'info';
  process.env.CURSOR_COMPATIBLE = 'true';

  // Ensure we have a clean environment for Cursor
  delete process.env.SILENT_MCP_RUNNING;
} else {
  // For normal operation, silence logs
  process.env.LOG_LEVEL = 'silent';
}

// Ensure logs directory exists
const logsDir = path.join(process.cwd(), 'logs');
if (!fs.existsSync(logsDir)) {
  fs.mkdirSync(logsDir, { recursive: true });
}

// Check if .env exists, create from example if not
const envPath = path.join(process.cwd(), '.env');
const envExamplePath = path.join(process.cwd(), '.env.example');

if (!fs.existsSync(envPath) && fs.existsSync(envExamplePath)) {
  fs.copyFileSync(envExamplePath, envPath);
}

// Define a function to ensure the child process is properly cleaned up on exit
function setupCleanExit(childProcess) {
  const exitHandler = () => {
    if (childProcess && !childProcess.killed) {
      childProcess.kill();
    }
    process.exit();
  };

  // Handle various termination signals
  process.on('SIGINT', exitHandler);
  process.on('SIGTERM', exitHandler);
  process.on('exit', exitHandler);
}

// Start the MCP server
try {
  // Critical: For Cursor, we need a very specific execution environment
  if (isCursor) {
    // Careful process cleanup for Cursor (optional but can help)
    try {
      const { execSync } = require('child_process');
      execSync('pkill -f "node.*stdio-server" || true', { stdio: 'ignore' });
    } catch (e) {
      // Ignore errors from process cleanup
    }

    // Allow some time for process cleanup
    setTimeout(() => {
      const scriptPath = path.join(__dirname, 'mcp-stdio.cjs');

      // For Cursor, we need very specific stdio handling
      // Using pipe for both stdin and stdout is critical
      const childProcess = spawn('node', [scriptPath], {
        stdio: ['pipe', 'pipe', 'pipe'], // All piped for maximum control
        env: {
          ...process.env,
          USE_STDIO_TRANSPORT: 'true',
          CURSOR_COMPATIBLE: 'true',
          // Make sure stdin/stdout are treated as binary
          NODE_OPTIONS: '--no-force-async-hooks-checks'
        }
      });

      // Ensure no buffering to prevent missed messages
      childProcess.stdin.setDefaultEncoding('utf8');

      // Create bidirectional pipes
      process.stdin.pipe(childProcess.stdin);
      childProcess.stdout.pipe(process.stdout);
      childProcess.stderr.pipe(process.stderr);

      // Setup error handling
      childProcess.on('error', (err) => {
        console.error('Failed to start server:', err.message);
        process.exit(1);
      });

      // Ensure child process is properly cleaned up
      setupCleanExit(childProcess);

    }, 500); // Short delay to ensure clean start
  }
  // For regular use, if silent-mcp.sh exists, use it
  else if (!isCursor && fs.existsSync(path.join(process.cwd(), 'silent-mcp.sh')) &&
    fs.statSync(path.join(process.cwd(), 'silent-mcp.sh')).isFile()) {
    // Execute the silent-mcp.sh script
    const childProcess = spawn('/bin/bash', [path.join(process.cwd(), 'silent-mcp.sh')], {
      stdio: ['inherit', 'inherit', 'ignore'], // Redirect stderr to /dev/null
      env: {
        ...process.env,
        USE_STDIO_TRANSPORT: 'true',
        LOG_LEVEL: 'silent'
      }
    });

    childProcess.on('error', (err) => {
      console.error('Failed to start server:', err.message);
      process.exit(1);
    });

    // Ensure child process is properly cleaned up
    setupCleanExit(childProcess);
  }
  // Otherwise run normally (direct non-Cursor)
  else {
    const scriptPath = path.join(__dirname, 'mcp-stdio.cjs');

    const childProcess = spawn('node', [scriptPath], {
      stdio: ['inherit', 'pipe', 'ignore'], // Redirect stderr to /dev/null for normal use
      env: {
        ...process.env,
        USE_STDIO_TRANSPORT: 'true'
      }
    });

    // Pipe child's stdout to parent's stdout
    childProcess.stdout.pipe(process.stdout);

    childProcess.on('error', (err) => {
      console.error('Failed to start server:', err.message);
      process.exit(1);
    });

    // Ensure child process is properly cleaned up
    setupCleanExit(childProcess);
  }
} catch (error) {
  console.error('Error starting server:', error.message);
  process.exit(1);
}
bin/test-stdio.js (new executable file, 62 lines)

@@ -0,0 +1,62 @@
#!/usr/bin/env node

/**
 * Test script for MCP stdio transport
 *
 * This script sends JSON-RPC 2.0 requests to the MCP server
 * running in stdio mode and displays the responses.
 *
 * Usage: node test-stdio.js | node bin/mcp-stdio.cjs
 */

// Send a ping request
const pingRequest = {
  jsonrpc: "2.0",
  id: 1,
  method: "ping"
};

// Send an info request
const infoRequest = {
  jsonrpc: "2.0",
  id: 2,
  method: "info"
};

// Send an echo request
const echoRequest = {
  jsonrpc: "2.0",
  id: 3,
  method: "echo",
  params: {
    message: "Hello, MCP!",
    timestamp: new Date().toISOString(),
    test: true,
    count: 42
  }
};

// Send the requests with a delay between them
setTimeout(() => {
  console.log(JSON.stringify(pingRequest));
}, 500);

setTimeout(() => {
  console.log(JSON.stringify(infoRequest));
}, 1000);

setTimeout(() => {
  console.log(JSON.stringify(echoRequest));
}, 1500);

// Process responses
process.stdin.on('data', (data) => {
  try {
    const response = JSON.parse(data.toString());
    console.error('Received response:');
    console.error(JSON.stringify(response, null, 2));
  } catch (error) {
    console.error('Error parsing response:', error);
    console.error('Raw data:', data.toString());
  }
});
bunfig.toml (36 lines changed)

@@ -19,10 +19,34 @@ collectCoverageFrom = [
]

[build]
target = "node"
target = "bun"
outdir = "./dist"
minify = true
minify = {
  whitespace = true,
  syntax = true,
  identifiers = true,
  module = true
}
sourcemap = "external"
entry = ["./src/index.ts", "./src/stdio-server.ts"]
splitting = true
naming = "[name].[hash].[ext]"
publicPath = "/assets/"
define = {
  "process.env.NODE_ENV": "process.env.NODE_ENV"
}

[build.javascript]
platform = "node"
format = "esm"
treeshaking = true
packages = {
  external = ["bun:*"]
}

[build.typescript]
dts = true
typecheck = true

[install]
production = false
@@ -48,6 +72,12 @@ reload = true
[performance]
gc = true
optimize = true
jit = true
smol = true
compact = true

[test.env]
NODE_ENV = "test"
NODE_ENV = "test"

[watch]
ignore = ["**/node_modules/**", "**/dist/**", "**/.git/**"]
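
For comparison, roughly the same build settings can be expressed with Bun's programmatic build API; this is an illustrative sketch, not a script that exists in the repository:

```typescript
// build.ts (hypothetical script broadly mirroring the [build] section above)
const result = await Bun.build({
  entrypoints: ["./src/index.ts", "./src/stdio-server.ts"],
  outdir: "./dist",
  target: "bun",
  minify: { whitespace: true, syntax: true, identifiers: true },
  sourcemap: "external",
  splitting: true,
});

if (!result.success) {
  for (const log of result.logs) console.error(log);
  process.exit(1);
}
```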
@@ -1,5 +1,5 @@
import fetch from "node-fetch";
import OpenAI from "openai";
import { Anthropic } from "@anthropic-ai/sdk";
import { DOMParser, Element, Document } from '@xmldom/xmldom';
import dotenv from 'dotenv';
import readline from 'readline';
@@ -9,11 +9,11 @@ import chalk from 'chalk';
dotenv.config();

// Retrieve API keys from environment variables
const openaiApiKey = process.env.OPENAI_API_KEY;
const anthropicApiKey = process.env.ANTHROPIC_API_KEY;
const hassToken = process.env.HASS_TOKEN;

if (!openaiApiKey) {
  console.error("Please set the OPENAI_API_KEY environment variable.");
if (!anthropicApiKey) {
  console.error("Please set the ANTHROPIC_API_KEY environment variable.");
  process.exit(1);
}

@@ -23,7 +23,7 @@ if (!hassToken) {
}

// MCP Server configuration
const MCP_SERVER = process.env.MCP_SERVER || 'http://localhost:3000';
const MCP_SERVER = 'http://localhost:3000';

interface McpTool {
  name: string;
@@ -113,13 +113,11 @@ interface ModelConfig {
  contextWindow: number;
}

// Update model listing to filter based on API key availability
// Update model listing to use Anthropic's Claude models
const AVAILABLE_MODELS: ModelConfig[] = [
  // OpenAI models always available
  { name: 'gpt-4', maxTokens: 8192, contextWindow: 8192 },
  { name: 'gpt-4-turbo-preview', maxTokens: 4096, contextWindow: 128000 },
  { name: 'gpt-3.5-turbo', maxTokens: 4096, contextWindow: 16385 },
  { name: 'gpt-3.5-turbo-16k', maxTokens: 16385, contextWindow: 16385 },
  // Anthropic Claude models
  { name: 'claude-3-7-sonnet-20250219', maxTokens: 4096, contextWindow: 200000 },
  { name: 'claude-3-5-haiku-20241022', maxTokens: 4096, contextWindow: 200000 },

  // Conditionally include DeepSeek models
  ...(process.env.DEEPSEEK_API_KEY ? [
@@ -131,7 +129,7 @@ const AVAILABLE_MODELS: ModelConfig[] = [
// Add configuration interface
interface AppConfig {
  mcpServer: string;
  openaiModel: string;
  anthropicModel: string;
  maxRetries: number;
  analysisTimeout: number;
  selectedModel: ModelConfig;
@@ -146,30 +144,31 @@ const logger = {
  debug: (msg: string) => process.env.DEBUG && console.log(chalk.gray(`› ${msg}`))
};

// Update default model selection in loadConfig
// Update loadConfig to use Claude models
function loadConfig(): AppConfig {
  // Always use gpt-4 for now
  const defaultModel = AVAILABLE_MODELS.find(m => m.name === 'gpt-4') || AVAILABLE_MODELS[0];
  // Use Claude 3.7 Sonnet as the default model
  const defaultModel = AVAILABLE_MODELS.find(m => m.name === 'claude-3-7-sonnet-20250219') || AVAILABLE_MODELS[0];

  return {
    mcpServer: process.env.MCP_SERVER || 'http://localhost:3000',
    openaiModel: defaultModel.name,
    anthropicModel: defaultModel.name,
    maxRetries: parseInt(process.env.MAX_RETRIES || '3'),
    analysisTimeout: parseInt(process.env.ANALYSIS_TIMEOUT || '30000'),
    selectedModel: defaultModel
  };
}

function getOpenAIClient(): OpenAI {
// Replace OpenAI client with Anthropic client
function getAnthropicClient(): Anthropic {
  const config = loadConfig();

  return new OpenAI({
    apiKey: config.selectedModel.name.startsWith('deepseek')
      ? process.env.DEEPSEEK_API_KEY
      : openaiApiKey,
    baseURL: config.selectedModel.name.startsWith('deepseek')
      ? 'https://api.deepseek.com/v1'
      : 'https://api.openai.com/v1'
  if (config.selectedModel.name.startsWith('deepseek') && process.env.DEEPSEEK_API_KEY) {
    // This is just a stub for DeepSeek - you'd need to implement this properly
    throw new Error("DeepSeek models not implemented yet with Anthropic integration");
  }

  return new Anthropic({
    apiKey: anthropicApiKey,
  });
}
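
For reference, the Anthropic SDK call pattern this change moves to looks roughly like the following; it is a hedged sketch of typical SDK usage, not code taken from the file, and the model id is simply the default selected above:

```typescript
import { Anthropic } from "@anthropic-ai/sdk";

// Minimal sketch of an Anthropic Messages API call (assumes ANTHROPIC_API_KEY is set).
const anthropic = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

const completion = await anthropic.messages.create({
  model: "claude-3-7-sonnet-20250219",
  max_tokens: 1024,
  messages: [{ role: "user", content: "Summarize the state of my Home Assistant setup." }],
});

// The reply arrives as an array of content blocks; text blocks carry the answer.
const text = completion.content[0]?.type === "text" ? completion.content[0].text : "";
console.log(text);
```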
|
||||
|
||||
@@ -463,7 +462,7 @@ function getRelevantDeviceTypes(prompt: string): string[] {
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates analysis and recommendations using the OpenAI API based on the Home Assistant data
|
||||
* Generates analysis and recommendations using the Anthropic API based on the Home Assistant data
|
||||
*/
|
||||
async function generateAnalysis(haInfo: any): Promise<SystemAnalysis> {
|
||||
const config = loadConfig();
|
||||
@@ -520,7 +519,7 @@ async function generateAnalysis(haInfo: any): Promise<SystemAnalysis> {
|
||||
}
|
||||
|
||||
// Original analysis code for non-test mode
|
||||
const openai = getOpenAIClient();
|
||||
const anthropic = getAnthropicClient();
|
||||
|
||||
const systemSummary = {
|
||||
total_devices: haInfo.device_summary?.total_devices || 0,
|
||||
@@ -588,20 +587,21 @@ Generate your response in this EXACT format:
|
||||
</analysis>`;
|
||||
|
||||
try {
|
||||
const completion = await openai.chat.completions.create({
|
||||
const completion = await anthropic.messages.create({
|
||||
model: config.selectedModel.name,
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: "You are a Home Assistant expert. Analyze the system data and provide detailed insights in the specified XML format. Be specific and actionable in your recommendations."
|
||||
},
|
||||
{ role: "user", content: prompt }
|
||||
role: "user",
|
||||
content: `<system>You are a Home Assistant expert. Analyze the system data and provide detailed insights in the specified XML format. Be specific and actionable in your recommendations.</system>
|
||||
|
||||
${prompt}`
|
||||
}
|
||||
],
|
||||
temperature: 0.7,
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 4000)
|
||||
});
|
||||
|
||||
const result = completion.choices[0].message?.content || "";
|
||||
const result = completion.content[0]?.type === 'text' ? completion.content[0].text : "";
|
||||
|
||||
// Clean the response and parse XML
|
||||
const cleanedResult = result.replace(/```xml/g, '').replace(/```/g, '').trim();
|
||||
@@ -673,7 +673,7 @@ Generate your response in this EXACT format:
|
||||
throw new Error(`Failed to parse analysis response: ${parseError.message}`);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error("Error during OpenAI API call:", error);
|
||||
console.error("Error during Anthropic API call:", error);
|
||||
throw new Error("Failed to generate analysis");
|
||||
}
|
||||
}
|
||||
@@ -814,7 +814,7 @@ async function handleAutomationOptimization(haInfo: any): Promise<void> {
|
||||
}
|
||||
|
||||
async function analyzeAutomations(automations: any[]): Promise<string> {
|
||||
const openai = getOpenAIClient();
|
||||
const anthropic = getAnthropicClient();
|
||||
const config = loadConfig();
|
||||
|
||||
// Create a more detailed summary of automations
|
||||
@@ -894,20 +894,21 @@ Focus on:
|
||||
5. Analyzing the distribution of automation types and suggesting optimizations`;
|
||||
|
||||
try {
|
||||
const completion = await openai.chat.completions.create({
|
||||
const completion = await anthropic.messages.create({
|
||||
model: config.selectedModel.name,
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: "You are a Home Assistant automation expert. Analyze the provided automation summary and respond with specific, actionable suggestions in the required XML format."
|
||||
},
|
||||
{ role: "user", content: prompt }
|
||||
role: "user",
|
||||
content: `<system>You are a Home Assistant automation expert. Analyze the provided automation summary and respond with specific, actionable suggestions in the required XML format.</system>
|
||||
|
||||
${prompt}`
|
||||
}
|
||||
],
|
||||
temperature: 0.2,
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 2048)
|
||||
});
|
||||
|
||||
const response = completion.choices[0].message?.content || "";
|
||||
const response = completion.content[0]?.type === 'text' ? completion.content[0].text : "";
|
||||
|
||||
// Ensure the response is valid XML
|
||||
if (!response.trim().startsWith('<analysis>')) {
|
||||
@@ -945,7 +946,7 @@ Focus on:
|
||||
}
|
||||
}
|
||||
|
||||
// Add new handleCustomPrompt function
|
||||
// Update handleCustomPrompt function to use Anthropic
|
||||
async function handleCustomPrompt(haInfo: any, customPrompt: string): Promise<void> {
|
||||
try {
|
||||
// Add device metadata
|
||||
@@ -1027,15 +1028,15 @@ async function handleCustomPrompt(haInfo: any, customPrompt: string): Promise<vo
|
||||
return;
|
||||
}
|
||||
|
||||
const openai = getOpenAIClient();
|
||||
const anthropic = getAnthropicClient();
|
||||
const config = loadConfig();
|
||||
|
||||
const completion = await openai.chat.completions.create({
|
||||
const completion = await anthropic.messages.create({
|
||||
model: config.selectedModel.name,
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: `You are a Home Assistant expert. Analyze the following Home Assistant information and respond to the user's prompt.
|
||||
role: "user",
|
||||
content: `<system>You are a Home Assistant expert. Analyze the following Home Assistant information and respond to the user's prompt.
|
||||
Current system has ${totalDevices} devices across ${deviceTypes.length} types.
|
||||
Device distribution: ${deviceSummary}
|
||||
|
||||
@@ -1047,16 +1048,17 @@ async function handleCustomPrompt(haInfo: any, customPrompt: string): Promise<vo
|
||||
- Service domains used: ${automationSummary.service_domains.join(', ')}
|
||||
|
||||
Detailed Automation List:
|
||||
${JSON.stringify(automationDetails, null, 2)}`
|
||||
},
|
||||
{ role: "user", content: customPrompt },
|
||||
${JSON.stringify(automationDetails, null, 2)}</system>
|
||||
|
||||
${customPrompt}`
|
||||
}
|
||||
],
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 2048), // Limit token usage
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 2048),
|
||||
temperature: 0.3,
|
||||
});
|
||||
|
||||
console.log("\nAnalysis Results:\n");
|
||||
console.log(completion.choices[0].message?.content || "No response generated");
|
||||
console.log(completion.content[0]?.type === 'text' ? completion.content[0].text : "No response generated");
|
||||
|
||||
} catch (error) {
|
||||
console.error("Error processing custom prompt:", error);
|
||||
@@ -1075,24 +1077,25 @@ async function handleCustomPrompt(haInfo: any, customPrompt: string): Promise<vo
|
||||
// Retry with simplified prompt if there's an error
|
||||
try {
|
||||
const retryPrompt = "Please provide a simpler analysis of the Home Assistant system.";
|
||||
const openai = getOpenAIClient();
|
||||
const anthropic = getAnthropicClient();
|
||||
const config = loadConfig();
|
||||
|
||||
const retryCompletion = await openai.chat.completions.create({
|
||||
const retryCompletion = await anthropic.messages.create({
|
||||
model: config.selectedModel.name,
|
||||
messages: [
|
||||
{
|
||||
role: "system",
|
||||
content: "You are a Home Assistant expert. Provide a simple analysis of the system."
|
||||
},
|
||||
{ role: "user", content: retryPrompt },
|
||||
role: "user",
|
||||
content: `<system>You are a Home Assistant expert. Provide a simple analysis of the system.</system>
|
||||
|
||||
${retryPrompt}`
|
||||
}
|
||||
],
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 2048), // Limit token usage
|
||||
max_tokens: Math.min(config.selectedModel.maxTokens, 2048),
|
||||
temperature: 0.3,
|
||||
});
|
||||
|
||||
console.log("\nAnalysis Results:\n");
|
||||
console.log(retryCompletion.choices[0].message?.content || "No response generated");
|
||||
console.log(retryCompletion.content[0]?.type === 'text' ? retryCompletion.content[0].text : "No response generated");
|
||||
} catch (retryError) {
|
||||
console.error("Error during retry:", retryError);
|
||||
}
|
||||
@@ -1174,9 +1177,9 @@ function getItems(xmlDoc: Document, path: string): string[] {
|
||||
.map(item => (item as Element).textContent || "");
|
||||
}
|
||||
|
||||
// Replace the Express server initialization at the bottom with Bun's server
|
||||
if (process.env.PROCESSOR_TYPE === 'openai') {
|
||||
// Initialize Bun server for OpenAI
|
||||
// Replace the Express/Bun server initialization
|
||||
if (process.env.PROCESSOR_TYPE === 'anthropic') {
|
||||
// Initialize Bun server for Anthropic
|
||||
const server = Bun.serve({
|
||||
port: process.env.PORT || 3000,
|
||||
async fetch(req) {
|
||||
@@ -1206,7 +1209,7 @@ if (process.env.PROCESSOR_TYPE === 'openai') {
|
||||
},
|
||||
});
|
||||
|
||||
console.log(`[OpenAI Server] Running on port ${server.port}`);
|
||||
console.log(`[Anthropic Server] Running on port ${server.port}`);
|
||||
} else {
|
||||
console.log('[Claude Mode] Using stdio communication');
|
||||
}
9
fix-env.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// This script fixes the NODE_ENV environment variable before any imports
|
||||
console.log('Setting NODE_ENV to "development" before imports');
|
||||
process.env.NODE_ENV = "development";
|
||||
|
||||
// Add more debugging
|
||||
console.log(`NODE_ENV is now set to: "${process.env.NODE_ENV}"`);
|
||||
|
||||
// Import the main application
|
||||
import './dist/index.js';
46
package.json
@@ -4,10 +4,20 @@
|
||||
"description": "Home Assistant Model Context Protocol",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"bin": {
|
||||
"homeassistant-mcp": "./bin/npx-entry.cjs",
|
||||
"mcp-stdio": "./bin/npx-entry.cjs"
|
||||
},
|
||||
"scripts": {
|
||||
"start": "bun run dist/index.js",
|
||||
"start:stdio": "bun run dist/stdio-server.js",
|
||||
"dev": "bun --hot --watch src/index.ts",
|
||||
"build": "bun build ./src/index.ts --outdir ./dist --target bun --minify",
|
||||
"build:all": "bun build ./src/index.ts ./src/stdio-server.ts --outdir ./dist --target bun --minify",
|
||||
"build:node": "bun build ./src/index.ts --outdir ./dist --target node --minify",
|
||||
"build:stdio": "bun build ./src/stdio-server.ts --outdir ./dist --target node --minify",
|
||||
"prepare": "husky install && bun run build:all",
|
||||
"stdio": "node ./bin/mcp-stdio.js",
|
||||
"test": "bun test",
|
||||
"test:watch": "bun test --watch",
|
||||
"test:coverage": "bun test --coverage",
|
||||
@@ -17,29 +27,36 @@
|
||||
"test:staged": "bun test --findRelatedTests",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"format": "prettier --write \"src/**/*.ts\"",
|
||||
"prepare": "husky install",
|
||||
"profile": "bun --inspect src/index.ts",
|
||||
"clean": "rm -rf dist .bun coverage",
|
||||
"typecheck": "bun x tsc --noEmit",
|
||||
"example:speech": "bun run extra/speech-to-text-example.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@anthropic-ai/sdk": "^0.39.0",
|
||||
"@elysiajs/cors": "^1.2.0",
|
||||
"@elysiajs/swagger": "^1.2.0",
|
||||
"@types/express-rate-limit": "^5.1.3",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/sanitize-html": "^2.9.5",
|
||||
"@types/sanitize-html": "^2.13.0",
|
||||
"@types/swagger-ui-express": "^4.1.8",
|
||||
"@types/ws": "^8.5.10",
|
||||
"@xmldom/xmldom": "^0.9.7",
|
||||
"chalk": "^5.4.1",
|
||||
"cors": "^2.8.5",
|
||||
"dotenv": "^16.4.7",
|
||||
"elysia": "^1.2.11",
|
||||
"express": "^4.21.2",
|
||||
"express-rate-limit": "^7.5.0",
|
||||
"helmet": "^7.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"node-fetch": "^3.3.2",
|
||||
"node-record-lpcm16": "^1.0.1",
|
||||
"openai": "^4.83.0",
|
||||
"sanitize-html": "^2.11.0",
|
||||
"openapi-types": "^12.1.3",
|
||||
"sanitize-html": "^2.15.0",
|
||||
"swagger-ui-express": "^5.0.1",
|
||||
"typescript": "^5.3.3",
|
||||
"winston": "^3.11.0",
|
||||
"winston-daily-rotate-file": "^5.0.0",
|
||||
@@ -49,21 +66,34 @@
|
||||
"devDependencies": {
|
||||
"@jest/globals": "^29.7.0",
|
||||
"@types/bun": "latest",
|
||||
"@types/cors": "^2.8.17",
|
||||
"@types/express": "^5.0.0",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^7.1.0",
|
||||
"@typescript-eslint/parser": "^7.1.0",
|
||||
"ajv": "^8.17.1",
|
||||
"bun-types": "^1.2.2",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"husky": "^9.0.11",
|
||||
"prettier": "^3.2.5",
|
||||
"supertest": "^6.3.3",
|
||||
"uuid": "^11.0.5"
|
||||
"supertest": "^7.1.0",
|
||||
"uuid": "^11.1.0"
|
||||
},
|
||||
"engines": {
|
||||
"bun": ">=1.0.0"
|
||||
}
|
||||
}
|
||||
"bun": ">=1.0.0",
|
||||
"node": ">=18.0.0"
|
||||
},
|
||||
"publishConfig": {
|
||||
"access": "public"
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"bin",
|
||||
"README.md",
|
||||
"LICENSE"
|
||||
]
|
||||
}
239
smithery.yaml
@@ -11,10 +11,21 @@ startCommand:
|
||||
hassToken:
|
||||
type: string
|
||||
description: The token for connecting to the Home Assistant API.
|
||||
port:
|
||||
hassHost:
|
||||
type: string
|
||||
default: http://homeassistant.local:8123
|
||||
description: The host for connecting to the Home Assistant API.
|
||||
hassSocketUrl:
|
||||
type: string
|
||||
default: ws://homeassistant.local:8123
|
||||
description: The socket URL for connecting to the Home Assistant API.
|
||||
mcp-port:
|
||||
type: number
|
||||
default: 4000
|
||||
default: 7123
|
||||
description: The port on which the MCP server will run.
|
||||
debug:
|
||||
type: boolean
|
||||
description: Whether to run the MCP server in debug mode.
|
||||
commandFunction:
|
||||
# A function that produces the CLI command to start the MCP on stdio.
|
||||
|-
|
||||
@@ -23,6 +34,228 @@ startCommand:
|
||||
args: ['--smol', 'run', 'start'],
|
||||
env: {
|
||||
HASS_TOKEN: config.hassToken,
|
||||
PORT: config.port.toString()
|
||||
HASS_HOST: config.hassHost || process.env.HASS_HOST,
|
||||
HASS_SOCKET_URL: config.hassSocketUrl || process.env.HASS_SOCKET_URL,
|
||||
PORT: config.port.toString(),
|
||||
DEBUG: config.debug !== undefined ? config.debug.toString() : process.env.DEBUG || 'false'
|
||||
}
|
||||
})
|
||||
|
||||
# Define the tools that this MCP server provides
|
||||
tools:
|
||||
- name: list_devices
|
||||
description: List all devices connected to Home Assistant
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
domain:
|
||||
type: string
|
||||
enum:
|
||||
- light
|
||||
- climate
|
||||
- alarm_control_panel
|
||||
- cover
|
||||
- switch
|
||||
- contact
|
||||
- media_player
|
||||
- fan
|
||||
- lock
|
||||
- vacuum
|
||||
- scene
|
||||
- script
|
||||
- camera
|
||||
area:
|
||||
type: string
|
||||
floor:
|
||||
type: string
|
||||
required: []
|
||||
|
||||
- name: control
|
||||
description: Control Home Assistant entities (lights, climate, etc.)
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
command:
|
||||
type: string
|
||||
enum:
|
||||
- turn_on
|
||||
- turn_off
|
||||
- toggle
|
||||
- open
|
||||
- close
|
||||
- stop
|
||||
- set_position
|
||||
- set_tilt_position
|
||||
- set_temperature
|
||||
- set_hvac_mode
|
||||
- set_fan_mode
|
||||
- set_humidity
|
||||
entity_id:
|
||||
type: string
|
||||
state:
|
||||
type: string
|
||||
brightness:
|
||||
type: number
|
||||
color_temp:
|
||||
type: number
|
||||
rgb_color:
|
||||
type: array
|
||||
items:
|
||||
type: number
|
||||
position:
|
||||
type: number
|
||||
tilt_position:
|
||||
type: number
|
||||
temperature:
|
||||
type: number
|
||||
target_temp_high:
|
||||
type: number
|
||||
target_temp_low:
|
||||
type: number
|
||||
hvac_mode:
|
||||
type: string
|
||||
fan_mode:
|
||||
type: string
|
||||
humidity:
|
||||
type: number
|
||||
required:
|
||||
- command
|
||||
- entity_id
|
||||
|
||||
- name: history
|
||||
description: Retrieve historical data for Home Assistant entities
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
entity_id:
|
||||
type: string
|
||||
start_time:
|
||||
type: string
|
||||
end_time:
|
||||
type: string
|
||||
limit:
|
||||
type: number
|
||||
required:
|
||||
- entity_id
|
||||
|
||||
- name: scene
|
||||
description: Activate scenes in Home Assistant
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
scene_id:
|
||||
type: string
|
||||
required:
|
||||
- scene_id
|
||||
|
||||
- name: notify
|
||||
description: Send notifications through Home Assistant
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
message:
|
||||
type: string
|
||||
title:
|
||||
type: string
|
||||
target:
|
||||
type: string
|
||||
required:
|
||||
- message
|
||||
|
||||
- name: automation
|
||||
description: Manage Home Assistant automations
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
action:
|
||||
type: string
|
||||
enum:
|
||||
- trigger
|
||||
- enable
|
||||
- disable
|
||||
- toggle
|
||||
- list
|
||||
automation_id:
|
||||
type: string
|
||||
required:
|
||||
- action
|
||||
|
||||
- name: addon
|
||||
description: Manage Home Assistant add-ons
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
action:
|
||||
type: string
|
||||
enum:
|
||||
- list
|
||||
- info
|
||||
- start
|
||||
- stop
|
||||
- restart
|
||||
- update
|
||||
addon_slug:
|
||||
type: string
|
||||
required:
|
||||
- action
|
||||
|
||||
- name: package
|
||||
description: Manage Home Assistant HACS packages
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
action:
|
||||
type: string
|
||||
enum:
|
||||
- list
|
||||
- info
|
||||
- install
|
||||
- uninstall
|
||||
- update
|
||||
package_id:
|
||||
type: string
|
||||
required:
|
||||
- action
|
||||
|
||||
- name: automation_config
|
||||
description: Get or update Home Assistant automation configurations
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
action:
|
||||
type: string
|
||||
enum:
|
||||
- get
|
||||
- update
|
||||
- create
|
||||
- delete
|
||||
automation_id:
|
||||
type: string
|
||||
config:
|
||||
type: object
|
||||
required:
|
||||
- action
|
||||
|
||||
- name: subscribe_events
|
||||
description: Subscribe to Home Assistant events via SSE
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
events:
|
||||
type: array
|
||||
items:
|
||||
type: string
|
||||
entity_id:
|
||||
type: string
|
||||
domain:
|
||||
type: string
|
||||
required: []
|
||||
|
||||
- name: get_sse_stats
|
||||
description: Get statistics about SSE connections
|
||||
parameters:
|
||||
type: object
|
||||
properties:
|
||||
detailed:
|
||||
type: boolean
|
||||
required: []
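For illustration, here is a hedged sketch of how a client might invoke the `control` tool declared above through the server's HTTP execute endpoint. The endpoint path, port, and response shape are assumptions for this example (based on the execute route shown elsewhere in this changeset), not part of smithery.yaml itself.

```typescript
// Hypothetical client call; endpoint path and response shape are assumed.
interface ExecuteResponse {
  success: boolean;
  result?: unknown;
  message?: string;
}

async function turnOnLight(entityId: string): Promise<ExecuteResponse> {
  const res = await fetch("http://localhost:7123/api/mcp/execute", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      name: "control",
      parameters: { command: "turn_on", entity_id: entityId, brightness: 128 },
    }),
  });
  return (await res.json()) as ExecuteResponse;
}

// Example usage:
// turnOnLight("light.living_room").then(console.log);
```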
106
src/__tests__/config.test.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { expect, test, describe, beforeEach, afterEach } from 'bun:test';
|
||||
import { MCPServerConfigSchema } from '../schemas/config.schema.js';
|
||||
|
||||
describe('Configuration Validation', () => {
|
||||
const originalEnv = { ...process.env };
|
||||
|
||||
beforeEach(() => {
|
||||
// Reset environment variables before each test
|
||||
process.env = { ...originalEnv };
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
// Restore original environment after each test
|
||||
process.env = originalEnv;
|
||||
});
|
||||
|
||||
test('validates default configuration', () => {
|
||||
const config = MCPServerConfigSchema.parse({});
|
||||
expect(config).toBeDefined();
|
||||
expect(config.port).toBe(3000);
|
||||
expect(config.environment).toBe('development');
|
||||
});
|
||||
|
||||
test('validates custom port', () => {
|
||||
const config = MCPServerConfigSchema.parse({ port: 8080 });
|
||||
expect(config.port).toBe(8080);
|
||||
});
|
||||
|
||||
test('rejects invalid port', () => {
|
||||
expect(() => MCPServerConfigSchema.parse({ port: 0 })).toThrow();
|
||||
expect(() => MCPServerConfigSchema.parse({ port: 70000 })).toThrow();
|
||||
});
|
||||
|
||||
test('validates environment values', () => {
|
||||
expect(() => MCPServerConfigSchema.parse({ environment: 'development' })).not.toThrow();
|
||||
expect(() => MCPServerConfigSchema.parse({ environment: 'production' })).not.toThrow();
|
||||
expect(() => MCPServerConfigSchema.parse({ environment: 'test' })).not.toThrow();
|
||||
expect(() => MCPServerConfigSchema.parse({ environment: 'invalid' })).toThrow();
|
||||
});
|
||||
|
||||
test('validates rate limiting configuration', () => {
|
||||
const config = MCPServerConfigSchema.parse({
|
||||
rateLimit: {
|
||||
maxRequests: 50,
|
||||
maxAuthRequests: 10
|
||||
}
|
||||
});
|
||||
expect(config.rateLimit.maxRequests).toBe(50);
|
||||
expect(config.rateLimit.maxAuthRequests).toBe(10);
|
||||
});
|
||||
|
||||
test('rejects invalid rate limit values', () => {
|
||||
expect(() => MCPServerConfigSchema.parse({
|
||||
rateLimit: {
|
||||
maxRequests: 0,
|
||||
maxAuthRequests: 5
|
||||
}
|
||||
})).toThrow();
|
||||
|
||||
expect(() => MCPServerConfigSchema.parse({
|
||||
rateLimit: {
|
||||
maxRequests: 100,
|
||||
maxAuthRequests: -1
|
||||
}
|
||||
})).toThrow();
|
||||
});
|
||||
|
||||
test('validates execution timeout', () => {
|
||||
const config = MCPServerConfigSchema.parse({ executionTimeout: 5000 });
|
||||
expect(config.executionTimeout).toBe(5000);
|
||||
});
|
||||
|
||||
test('rejects invalid execution timeout', () => {
|
||||
expect(() => MCPServerConfigSchema.parse({ executionTimeout: 500 })).toThrow();
|
||||
expect(() => MCPServerConfigSchema.parse({ executionTimeout: 400000 })).toThrow();
|
||||
});
|
||||
|
||||
test('validates transport settings', () => {
|
||||
const config = MCPServerConfigSchema.parse({
|
||||
useStdioTransport: true,
|
||||
useHttpTransport: false
|
||||
});
|
||||
expect(config.useStdioTransport).toBe(true);
|
||||
expect(config.useHttpTransport).toBe(false);
|
||||
});
|
||||
|
||||
test('validates CORS settings', () => {
|
||||
const config = MCPServerConfigSchema.parse({
|
||||
corsOrigin: 'https://example.com'
|
||||
});
|
||||
expect(config.corsOrigin).toBe('https://example.com');
|
||||
});
|
||||
|
||||
test('validates debug settings', () => {
|
||||
const config = MCPServerConfigSchema.parse({
|
||||
debugMode: true,
|
||||
debugStdio: true,
|
||||
debugHttp: true,
|
||||
silentStartup: false
|
||||
});
|
||||
expect(config.debugMode).toBe(true);
|
||||
expect(config.debugStdio).toBe(true);
|
||||
expect(config.debugHttp).toBe(true);
|
||||
expect(config.silentStartup).toBe(false);
|
||||
});
|
||||
});
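The assertions above pin down the expected shape of `MCPServerConfigSchema`. As a reading aid, here is a minimal Zod sketch that would satisfy these tests; the field bounds and defaults are inferred from the assertions, and the actual schema module (`../schemas/config.schema.js`) may differ.

```typescript
import { z } from 'zod';

// Minimal sketch inferred from the tests above; bounds and defaults are assumptions.
export const MCPServerConfigSchema = z.object({
  port: z.number().int().min(1).max(65535).default(3000),
  environment: z.enum(['development', 'production', 'test']).default('development'),
  executionTimeout: z.number().int().min(1000).max(300000).default(30000),
  streamingEnabled: z.boolean().default(false),
  useStdioTransport: z.boolean().default(false),
  useHttpTransport: z.boolean().default(true),
  debugMode: z.boolean().default(false),
  debugStdio: z.boolean().default(false),
  debugHttp: z.boolean().default(false),
  silentStartup: z.boolean().default(false),
  corsOrigin: z.string().default('*'),
  rateLimit: z.object({
    maxRequests: z.number().int().positive().default(100),
    maxAuthRequests: z.number().int().positive().default(5),
  }).default({}),
});

export type MCPServerConfigType = z.infer<typeof MCPServerConfigSchema>;
```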
85
src/__tests__/rate-limit.test.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { expect, test, describe, beforeAll, afterAll } from 'bun:test';
|
||||
import express from 'express';
|
||||
import { apiLimiter, authLimiter } from '../middleware/rate-limit.middleware.js';
|
||||
import supertest from 'supertest';
|
||||
|
||||
describe('Rate Limiting Middleware', () => {
|
||||
let app: express.Application;
|
||||
let request: supertest.SuperTest<supertest.Test>;
|
||||
|
||||
beforeAll(() => {
|
||||
app = express();
|
||||
|
||||
// Set up test routes with rate limiting
|
||||
app.use('/api', apiLimiter);
|
||||
app.use('/auth', authLimiter);
|
||||
|
||||
// Test endpoints
|
||||
app.get('/api/test', (req, res) => {
|
||||
res.json({ message: 'API test successful' });
|
||||
});
|
||||
|
||||
app.post('/auth/login', (req, res) => {
|
||||
res.json({ message: 'Login successful' });
|
||||
});
|
||||
|
||||
request = supertest(app);
|
||||
});
|
||||
|
||||
test('allows requests within API rate limit', async () => {
|
||||
// Make multiple requests within the limit
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const response = await request.get('/api/test');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('API test successful');
|
||||
}
|
||||
});
|
||||
|
||||
test('enforces API rate limit', async () => {
|
||||
// Make more requests than the limit allows
|
||||
const requests = Array(150).fill(null).map(() =>
|
||||
request.get('/api/test')
|
||||
);
|
||||
|
||||
const responses = await Promise.all(requests);
|
||||
|
||||
// Some requests should be successful, others should be rate limited
|
||||
const successfulRequests = responses.filter(r => r.status === 200);
|
||||
const limitedRequests = responses.filter(r => r.status === 429);
|
||||
|
||||
expect(successfulRequests.length).toBeGreaterThan(0);
|
||||
expect(limitedRequests.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('allows requests within auth rate limit', async () => {
|
||||
// Make multiple requests within the limit
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const response = await request.post('/auth/login');
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.message).toBe('Login successful');
|
||||
}
|
||||
});
|
||||
|
||||
test('enforces stricter auth rate limit', async () => {
|
||||
// Make more requests than the auth limit allows
|
||||
const requests = Array(10).fill(null).map(() =>
|
||||
request.post('/auth/login')
|
||||
);
|
||||
|
||||
const responses = await Promise.all(requests);
|
||||
|
||||
// Some requests should be successful, others should be rate limited
|
||||
const successfulRequests = responses.filter(r => r.status === 200);
|
||||
const limitedRequests = responses.filter(r => r.status === 429);
|
||||
|
||||
expect(successfulRequests.length).toBeLessThan(10);
|
||||
expect(limitedRequests.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
test('includes rate limit headers', async () => {
|
||||
const response = await request.get('/api/test');
|
||||
expect(response.headers['ratelimit-limit']).toBeDefined();
|
||||
expect(response.headers['ratelimit-remaining']).toBeDefined();
|
||||
expect(response.headers['ratelimit-reset']).toBeDefined();
|
||||
});
|
||||
});
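The limiters under test can be built on `express-rate-limit`, which is listed in package.json. A minimal sketch consistent with these tests is shown below; the window sizes, limits, and messages are assumptions based on the RATE_LIMIT_* defaults used elsewhere in this changeset, not the actual `rate-limit.middleware.ts` source.

```typescript
import rateLimit from 'express-rate-limit';

// Sketch of the limiters exercised above; windows and limits are assumptions.
export const apiLimiter = rateLimit({
  windowMs: 15 * 60 * 1000,  // 15-minute window
  max: 100,                  // generous limit for regular API traffic
  standardHeaders: true,     // emits the RateLimit-* headers the tests check
  legacyHeaders: false,
  message: { error: true, message: 'Too many requests, please try again later.' },
});

export const authLimiter = rateLimit({
  windowMs: 15 * 60 * 1000,
  max: 5,                    // stricter limit for authentication endpoints
  standardHeaders: true,
  legacyHeaders: false,
  message: { error: true, message: 'Too many authentication requests, please try again later.' },
});
```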
169
src/__tests__/security.test.ts
Normal file
@@ -0,0 +1,169 @@
|
||||
import { describe, expect, test, beforeEach } from 'bun:test';
|
||||
import express, { Request, Response } from 'express';
|
||||
import request from 'supertest';
|
||||
import { SecurityMiddleware } from '../security/enhanced-middleware';
|
||||
|
||||
describe('SecurityMiddleware', () => {
|
||||
const app = express();
|
||||
|
||||
// Initialize security middleware
|
||||
SecurityMiddleware.initialize(app);
|
||||
|
||||
// Test routes
|
||||
app.get('/test', (_req: Request, res: Response) => {
|
||||
res.status(200).json({ message: 'Test successful' });
|
||||
});
|
||||
|
||||
app.post('/test', (req: Request, res: Response) => {
|
||||
res.status(200).json(req.body);
|
||||
});
|
||||
|
||||
app.post('/auth/login', (_req: Request, res: Response) => {
|
||||
res.status(200).json({ message: 'Auth successful' });
|
||||
});
|
||||
|
||||
describe('Security Headers', () => {
|
||||
test('should set security headers correctly', async () => {
|
||||
const response = await request(app).get('/test');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers['x-frame-options']).toBe('DENY');
|
||||
expect(response.headers['x-xss-protection']).toBe('1; mode=block');
|
||||
expect(response.headers['x-content-type-options']).toBe('nosniff');
|
||||
expect(response.headers['referrer-policy']).toBe('strict-origin-when-cross-origin');
|
||||
expect(response.headers['strict-transport-security']).toBe('max-age=31536000; includeSubDomains; preload');
|
||||
expect(response.headers['x-permitted-cross-domain-policies']).toBe('none');
|
||||
expect(response.headers['cross-origin-embedder-policy']).toBe('require-corp');
|
||||
expect(response.headers['cross-origin-opener-policy']).toBe('same-origin');
|
||||
expect(response.headers['cross-origin-resource-policy']).toBe('same-origin');
|
||||
expect(response.headers['origin-agent-cluster']).toBe('?1');
|
||||
expect(response.headers['x-powered-by']).toBeUndefined();
|
||||
});
|
||||
|
||||
test('should set Content-Security-Policy header correctly', async () => {
|
||||
const response = await request(app).get('/test');
|
||||
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.headers['content-security-policy']).toContain("default-src 'self'");
|
||||
expect(response.headers['content-security-policy']).toContain("script-src 'self' 'unsafe-inline'");
|
||||
expect(response.headers['content-security-policy']).toContain("style-src 'self' 'unsafe-inline'");
|
||||
expect(response.headers['content-security-policy']).toContain("img-src 'self' data: https:");
|
||||
expect(response.headers['content-security-policy']).toContain("font-src 'self'");
|
||||
expect(response.headers['content-security-policy']).toContain("connect-src 'self'");
|
||||
expect(response.headers['content-security-policy']).toContain("frame-ancestors 'none'");
|
||||
expect(response.headers['content-security-policy']).toContain("form-action 'self'");
|
||||
});
|
||||
});
|
||||
|
||||
describe('Request Validation', () => {
|
||||
test('should reject requests with long URLs', async () => {
|
||||
const longUrl = '/test?' + 'x'.repeat(2500);
|
||||
const response = await request(app).get(longUrl);
|
||||
expect(response.status).toBe(413);
|
||||
expect(response.body.error).toBe(true);
|
||||
expect(response.body.message).toContain('URL too long');
|
||||
});
|
||||
|
||||
test('should reject large request bodies', async () => {
|
||||
const largeBody = { data: 'x'.repeat(2 * 1024 * 1024) }; // 2MB
|
||||
const response = await request(app)
|
||||
.post('/test')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send(largeBody);
|
||||
expect(response.status).toBe(413);
|
||||
expect(response.body.error).toBe(true);
|
||||
expect(response.body.message).toContain('Request body too large');
|
||||
});
|
||||
|
||||
test('should require correct content type for POST requests', async () => {
|
||||
const response = await request(app)
|
||||
.post('/test')
|
||||
.set('Content-Type', 'text/plain')
|
||||
.send('test data');
|
||||
expect(response.status).toBe(415);
|
||||
expect(response.body.error).toBe(true);
|
||||
expect(response.body.message).toContain('Content-Type must be application/json');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input Sanitization', () => {
|
||||
test('should sanitize string input with HTML', async () => {
|
||||
const response = await request(app)
|
||||
.post('/test')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({ text: '<script>alert("xss")</script>Hello<img src="x" onerror="alert(1)">' });
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.text).toBe('Hello');
|
||||
});
|
||||
|
||||
test('should sanitize nested object input', async () => {
|
||||
const response = await request(app)
|
||||
.post('/test')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({
|
||||
user: {
|
||||
name: '<script>alert("xss")</script>John',
|
||||
bio: '<img src="x" onerror="alert(1)">Developer'
|
||||
}
|
||||
});
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.user.name).toBe('John');
|
||||
expect(response.body.user.bio).toBe('Developer');
|
||||
});
|
||||
|
||||
test('should sanitize array input', async () => {
|
||||
const response = await request(app)
|
||||
.post('/test')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({
|
||||
items: [
|
||||
'<script>alert(1)</script>Hello',
|
||||
'<img src="x" onerror="alert(1)">World'
|
||||
]
|
||||
});
|
||||
expect(response.status).toBe(200);
|
||||
expect(response.body.items[0]).toBe('Hello');
|
||||
expect(response.body.items[1]).toBe('World');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting', () => {
|
||||
beforeEach(() => {
|
||||
SecurityMiddleware.clearRateLimits();
|
||||
});
|
||||
|
||||
test('should enforce regular rate limits', async () => {
|
||||
// Make 50 requests (should succeed)
|
||||
for (let i = 0; i < 50; i++) {
|
||||
const response = await request(app).get('/test');
|
||||
expect(response.status).toBe(200);
|
||||
}
|
||||
|
||||
// 51st request should fail
|
||||
const response = await request(app).get('/test');
|
||||
expect(response.status).toBe(429);
|
||||
expect(response.body.error).toBe(true);
|
||||
expect(response.body.message).toContain('Too many requests');
|
||||
});
|
||||
|
||||
test('should enforce stricter auth rate limits', async () => {
|
||||
// Make 3 auth requests (should succeed)
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const response = await request(app)
|
||||
.post('/auth/login')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({});
|
||||
expect(response.status).toBe(200);
|
||||
}
|
||||
|
||||
// 4th auth request should fail
|
||||
const response = await request(app)
|
||||
.post('/auth/login')
|
||||
.set('Content-Type', 'application/json')
|
||||
.send({});
|
||||
expect(response.status).toBe(429);
|
||||
expect(response.body.error).toBe(true);
|
||||
expect(response.body.message).toContain('Too many authentication requests');
|
||||
});
|
||||
});
|
||||
});
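The input-sanitization behaviour asserted above (script tags and event handlers stripped, plain text preserved, applied recursively to objects and arrays) could be implemented with `sanitize-html`, which is already a dependency. The following is a hedged sketch of that behaviour, not the actual `enhanced-middleware` implementation.

```typescript
import sanitizeHtml from 'sanitize-html';

// Recursively strips markup from request payloads; a sketch of the behaviour
// the tests above expect, not the project's real SecurityMiddleware code.
export function sanitizeValue(value: unknown): unknown {
  if (typeof value === 'string') {
    // Disallow all tags and attributes, keeping only text content
    return sanitizeHtml(value, { allowedTags: [], allowedAttributes: {} });
  }
  if (Array.isArray(value)) {
    return value.map(sanitizeValue);
  }
  if (value !== null && typeof value === 'object') {
    return Object.fromEntries(
      Object.entries(value as Record<string, unknown>).map(([k, v]) => [k, sanitizeValue(v)]),
    );
  }
  return value;
}
```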
32
src/config.js
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
|
||||
* MCP Server Configuration
|
||||
*
|
||||
* This file contains the configuration for the MCP server.
|
||||
* Values can be overridden via environment variables.
|
||||
*/
|
||||
|
||||
// Default values for the application configuration
|
||||
export const APP_CONFIG = {
|
||||
// Server configuration
|
||||
PORT: process.env.PORT ? parseInt(process.env.PORT, 10) : 3000,
|
||||
NODE_ENV: process.env.NODE_ENV || 'development',
|
||||
|
||||
// Execution settings
|
||||
EXECUTION_TIMEOUT: process.env.EXECUTION_TIMEOUT ? parseInt(process.env.EXECUTION_TIMEOUT, 10) : 30000,
|
||||
STREAMING_ENABLED: process.env.STREAMING_ENABLED === 'true',
|
||||
|
||||
// Transport settings
|
||||
USE_STDIO_TRANSPORT: process.env.USE_STDIO_TRANSPORT === 'true',
|
||||
USE_HTTP_TRANSPORT: process.env.USE_HTTP_TRANSPORT !== 'false',
|
||||
|
||||
// Debug and logging settings
|
||||
DEBUG_MODE: process.env.DEBUG_MODE === 'true',
|
||||
DEBUG_STDIO: process.env.DEBUG_STDIO === 'true',
|
||||
DEBUG_HTTP: process.env.DEBUG_HTTP === 'true',
|
||||
SILENT_STARTUP: process.env.SILENT_STARTUP === 'true',
|
||||
|
||||
// CORS settings
|
||||
CORS_ORIGIN: process.env.CORS_ORIGIN || '*'
|
||||
};
|
||||
|
||||
export default APP_CONFIG;
61
src/config.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
/**
|
||||
* Configuration for the Model Context Protocol (MCP) server
|
||||
* Values can be overridden using environment variables
|
||||
*/
|
||||
|
||||
import { MCPServerConfigSchema, MCPServerConfigType } from './schemas/config.schema.js';
|
||||
import { logger } from './utils/logger.js';
|
||||
|
||||
function loadConfig(): MCPServerConfigType {
|
||||
try {
|
||||
const rawConfig = {
|
||||
// Server configuration
|
||||
port: parseInt(process.env.PORT || '3000', 10),
|
||||
environment: process.env.NODE_ENV || 'development',
|
||||
|
||||
// Execution settings
|
||||
executionTimeout: parseInt(process.env.EXECUTION_TIMEOUT || '30000', 10),
|
||||
streamingEnabled: process.env.STREAMING_ENABLED === 'true',
|
||||
|
||||
// Transport settings
|
||||
useStdioTransport: process.env.USE_STDIO_TRANSPORT === 'true',
|
||||
useHttpTransport: process.env.USE_HTTP_TRANSPORT === 'true',
|
||||
|
||||
// Debug and logging
|
||||
debugMode: process.env.DEBUG_MODE === 'true',
|
||||
debugStdio: process.env.DEBUG_STDIO === 'true',
|
||||
debugHttp: process.env.DEBUG_HTTP === 'true',
|
||||
silentStartup: process.env.SILENT_STARTUP === 'true',
|
||||
|
||||
// CORS settings
|
||||
corsOrigin: process.env.CORS_ORIGIN || '*',
|
||||
|
||||
// Rate limiting
|
||||
rateLimit: {
|
||||
maxRequests: parseInt(process.env.RATE_LIMIT_MAX_REQUESTS || '100', 10),
|
||||
maxAuthRequests: parseInt(process.env.RATE_LIMIT_MAX_AUTH_REQUESTS || '5', 10),
|
||||
},
|
||||
};
|
||||
|
||||
// Validate and parse configuration
|
||||
const validatedConfig = MCPServerConfigSchema.parse(rawConfig);
|
||||
|
||||
// Log validation success
|
||||
if (!validatedConfig.silentStartup) {
|
||||
logger.info('Configuration validated successfully');
|
||||
if (validatedConfig.debugMode) {
|
||||
logger.debug('Current configuration:', validatedConfig);
|
||||
}
|
||||
}
|
||||
|
||||
return validatedConfig;
|
||||
} catch (error) {
|
||||
// Log validation errors
|
||||
logger.error('Configuration validation failed:', error);
|
||||
throw new Error('Invalid configuration. Please check your environment variables.');
|
||||
}
|
||||
}
|
||||
|
||||
export const APP_CONFIG = loadConfig();
|
||||
export type { MCPServerConfigType };
|
||||
export default APP_CONFIG;
|
||||
@@ -85,10 +85,16 @@ for (const envVar of requiredEnvVars) {
|
||||
}
|
||||
}
|
||||
|
||||
// Fix NODE_ENV if it's set to "1"
|
||||
if (process.env.NODE_ENV === "1") {
|
||||
console.log('Fixing NODE_ENV from "1" to "development"');
|
||||
process.env.NODE_ENV = "development";
|
||||
}
|
||||
|
||||
// Load and validate configuration
|
||||
export const APP_CONFIG = AppConfigSchema.parse({
|
||||
PORT: process.env.PORT || 4000,
|
||||
NODE_ENV: process.env.NODE_ENV || "development",
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
HASS_HOST: process.env.HASS_HOST || "http://192.168.178.63:8123",
|
||||
HASS_TOKEN: process.env.HASS_TOKEN,
|
||||
JWT_SECRET: process.env.JWT_SECRET || "your-secret-key",
309
src/index.ts
@@ -1,157 +1,184 @@
|
||||
import { file } from "bun";
|
||||
import { Elysia } from "elysia";
|
||||
import { cors } from "@elysiajs/cors";
|
||||
import { swagger } from "@elysiajs/swagger";
|
||||
import {
|
||||
rateLimiter,
|
||||
securityHeaders,
|
||||
validateRequest,
|
||||
sanitizeInput,
|
||||
errorHandler,
|
||||
} from "./security/index.js";
|
||||
import {
|
||||
get_hass,
|
||||
call_service,
|
||||
list_devices,
|
||||
get_states,
|
||||
get_state,
|
||||
} from "./hass/index.js";
|
||||
import { z } from "zod";
|
||||
import {
|
||||
commonCommands,
|
||||
coverCommands,
|
||||
climateCommands,
|
||||
type Command,
|
||||
} from "./commands.js";
|
||||
import { speechService } from "./speech/index.js";
|
||||
import { APP_CONFIG } from "./config/app.config.js";
|
||||
import { loadEnvironmentVariables } from "./config/loadEnv.js";
|
||||
import { MCP_SCHEMA } from "./mcp/schema.js";
|
||||
import {
|
||||
listDevicesTool,
|
||||
controlTool,
|
||||
subscribeEventsTool,
|
||||
getSSEStatsTool,
|
||||
automationConfigTool,
|
||||
addonTool,
|
||||
packageTool,
|
||||
sceneTool,
|
||||
notifyTool,
|
||||
historyTool,
|
||||
} from "./tools/index.js";
|
||||
/**
|
||||
* Home Assistant Model Context Protocol (MCP) Server
|
||||
* A standardized protocol for AI tools to interact with Home Assistant
|
||||
*/
|
||||
|
||||
// Load environment variables based on NODE_ENV
|
||||
await loadEnvironmentVariables();
|
||||
import express from 'express';
|
||||
import cors from 'cors';
|
||||
import swaggerUi from 'swagger-ui-express';
|
||||
import { MCPServer } from './mcp/MCPServer.js';
|
||||
import { loggingMiddleware, timeoutMiddleware } from './mcp/middleware/index.js';
|
||||
import { StdioTransport } from './mcp/transports/stdio.transport.js';
|
||||
import { HttpTransport } from './mcp/transports/http.transport.js';
|
||||
import { APP_CONFIG } from './config.js';
|
||||
import { logger } from "./utils/logger.js";
|
||||
import { openApiConfig } from './openapi.js';
|
||||
import { apiLimiter, authLimiter } from './middleware/rate-limit.middleware.js';
|
||||
import { SecurityMiddleware } from './security/enhanced-middleware.js';
|
||||
|
||||
// Configuration
|
||||
const HASS_TOKEN = process.env.HASS_TOKEN;
|
||||
const PORT = parseInt(process.env.PORT || "4000", 10);
|
||||
// Home Assistant tools
|
||||
import { LightsControlTool } from './tools/homeassistant/lights.tool.js';
|
||||
import { ClimateControlTool } from './tools/homeassistant/climate.tool.js';
|
||||
|
||||
console.log("Initializing Home Assistant connection...");
|
||||
// Home Assistant optional tools - these can be added as needed
|
||||
// import { ControlTool } from './tools/control.tool.js';
|
||||
// import { SceneTool } from './tools/scene.tool.js';
|
||||
// import { AutomationTool } from './tools/automation.tool.js';
|
||||
// import { NotifyTool } from './tools/notify.tool.js';
|
||||
// import { ListDevicesTool } from './tools/list-devices.tool.js';
|
||||
// import { HistoryTool } from './tools/history.tool.js';
|
||||
|
||||
// Define Tool interface and export it
|
||||
export interface Tool {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters: z.ZodType<any>;
|
||||
execute: (params: any) => Promise<any>;
|
||||
/**
|
||||
* Check if running in stdio mode via command line args
|
||||
*/
|
||||
function isStdioMode(): boolean {
|
||||
return process.argv.includes('--stdio');
|
||||
}
|
||||
|
||||
// Array to store tools
|
||||
const tools: Tool[] = [
|
||||
listDevicesTool,
|
||||
controlTool,
|
||||
subscribeEventsTool,
|
||||
getSSEStatsTool,
|
||||
automationConfigTool,
|
||||
addonTool,
|
||||
packageTool,
|
||||
sceneTool,
|
||||
notifyTool,
|
||||
historyTool,
|
||||
];
|
||||
/**
|
||||
* Main function to start the MCP server
|
||||
*/
|
||||
async function main() {
|
||||
logger.info('Starting Home Assistant MCP Server...');
|
||||
|
||||
// Initialize Elysia app with middleware
|
||||
const app = new Elysia()
|
||||
.use(cors())
|
||||
.use(swagger())
|
||||
.use(rateLimiter)
|
||||
.use(securityHeaders)
|
||||
.use(validateRequest)
|
||||
.use(sanitizeInput)
|
||||
.use(errorHandler);
|
||||
// Check if we're in stdio mode from command line
|
||||
const useStdio = isStdioMode() || APP_CONFIG.useStdioTransport;
|
||||
|
||||
// Mount API routes
|
||||
app.get("/api/mcp/schema", () => MCP_SCHEMA);
|
||||
// Configure server
|
||||
const EXECUTION_TIMEOUT = APP_CONFIG.executionTimeout;
|
||||
const STREAMING_ENABLED = APP_CONFIG.streamingEnabled;
|
||||
|
||||
app.post("/api/mcp/execute", async ({ body }: { body: { name: string; parameters: Record<string, unknown> } }) => {
|
||||
const { name: toolName, parameters } = body;
|
||||
const tool = tools.find((t) => t.name === toolName);
|
||||
// Get the server instance (singleton)
|
||||
const server = MCPServer.getInstance();
|
||||
|
||||
if (!tool) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Tool '${toolName}' not found`,
|
||||
};
|
||||
}
|
||||
// Register Home Assistant tools
|
||||
server.registerTool(new LightsControlTool());
|
||||
server.registerTool(new ClimateControlTool());
|
||||
|
||||
try {
|
||||
const result = await tool.execute(parameters);
|
||||
return {
|
||||
success: true,
|
||||
result,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
});
|
||||
// Add optional tools here as needed
|
||||
// server.registerTool(new ControlTool());
|
||||
// server.registerTool(new SceneTool());
|
||||
// server.registerTool(new NotifyTool());
|
||||
// server.registerTool(new ListDevicesTool());
|
||||
// server.registerTool(new HistoryTool());
|
||||
|
||||
// Health check endpoint with MCP info
|
||||
app.get("/api/mcp/health", () => ({
|
||||
status: "ok",
|
||||
timestamp: new Date().toISOString(),
|
||||
version: "1.0.0",
|
||||
mcp_version: "1.0",
|
||||
supported_tools: tools.map(t => t.name),
|
||||
speech_enabled: APP_CONFIG.SPEECH.ENABLED,
|
||||
wake_word_enabled: APP_CONFIG.SPEECH.WAKE_WORD_ENABLED,
|
||||
speech_to_text_enabled: APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED,
|
||||
}));
|
||||
// Add middlewares
|
||||
server.use(loggingMiddleware);
|
||||
server.use(timeoutMiddleware(EXECUTION_TIMEOUT));
|
||||
|
||||
// Initialize speech service if enabled
|
||||
if (APP_CONFIG.SPEECH.ENABLED) {
|
||||
console.log("Initializing speech service...");
|
||||
speechService.initialize().catch((error) => {
|
||||
console.error("Failed to initialize speech service:", error);
|
||||
});
|
||||
}
|
||||
// Initialize transports
|
||||
if (useStdio) {
|
||||
logger.info('Using Standard I/O transport');
|
||||
|
||||
// Create API endpoints for each tool
|
||||
tools.forEach((tool) => {
|
||||
app.post(`/api/tools/${tool.name}`, async ({ body }: { body: Record<string, unknown> }) => {
|
||||
const result = await tool.execute(body);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
// Start the server
|
||||
app.listen(PORT, () => {
|
||||
console.log(`Server is running on port ${PORT}`);
|
||||
});
|
||||
|
||||
// Handle server shutdown
|
||||
process.on("SIGTERM", async () => {
|
||||
console.log("Received SIGTERM. Shutting down gracefully...");
|
||||
if (APP_CONFIG.SPEECH.ENABLED) {
|
||||
await speechService.shutdown().catch((error) => {
|
||||
console.error("Error shutting down speech service:", error);
|
||||
// Create and configure the stdio transport with debug enabled for stdio mode
|
||||
const stdioTransport = new StdioTransport({
|
||||
debug: true, // Always enable debug in stdio mode for better visibility
|
||||
silent: false // Never be silent in stdio mode
|
||||
});
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
// Export tools for testing purposes
|
||||
export { tools };
|
||||
// Explicitly set the server reference to ensure access to tools
|
||||
stdioTransport.setServer(server);
|
||||
|
||||
// Register the transport
|
||||
server.registerTransport(stdioTransport);
|
||||
|
||||
// Special handling for stdio mode - don't start other transports
|
||||
if (isStdioMode()) {
|
||||
logger.info('Running in pure stdio mode (from CLI)');
|
||||
// Start the server
|
||||
await server.start();
|
||||
logger.info('MCP Server started successfully');
|
||||
|
||||
// Handle shutdown
|
||||
const shutdown = async () => {
|
||||
logger.info('Shutting down MCP Server...');
|
||||
try {
|
||||
await server.shutdown();
|
||||
logger.info('MCP Server shutdown complete');
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error('Error during shutdown:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Register shutdown handlers
|
||||
process.on('SIGINT', shutdown);
|
||||
process.on('SIGTERM', shutdown);
|
||||
|
||||
// Exit the function early as we're in stdio-only mode
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// HTTP transport (only if not in pure stdio mode)
|
||||
if (APP_CONFIG.useHttpTransport) {
|
||||
logger.info('Using HTTP transport on port ' + APP_CONFIG.port);
|
||||
const app = express();
|
||||
|
||||
// Apply enhanced security middleware
|
||||
app.use(SecurityMiddleware.applySecurityHeaders);
|
||||
|
||||
// CORS configuration
|
||||
app.use(cors({
|
||||
origin: APP_CONFIG.corsOrigin,
|
||||
methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'],
|
||||
allowedHeaders: ['Content-Type', 'Authorization'],
|
||||
maxAge: 86400 // 24 hours
|
||||
}));
|
||||
|
||||
// Apply rate limiting to all routes
|
||||
app.use('/api', apiLimiter);
|
||||
app.use('/auth', authLimiter);
|
||||
|
||||
// Swagger UI setup
|
||||
app.use('/api-docs', swaggerUi.serve, swaggerUi.setup(openApiConfig, {
|
||||
explorer: true,
|
||||
customCss: '.swagger-ui .topbar { display: none }',
|
||||
customSiteTitle: 'Home Assistant MCP API Documentation'
|
||||
}));
|
||||
|
||||
// Health check endpoint
|
||||
app.get('/health', (req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
version: process.env.npm_package_version || '1.0.0'
|
||||
});
|
||||
});
|
||||
|
||||
const httpTransport = new HttpTransport({
|
||||
port: APP_CONFIG.port,
|
||||
corsOrigin: APP_CONFIG.corsOrigin,
|
||||
apiPrefix: "/api/mcp",
|
||||
debug: APP_CONFIG.debugHttp
|
||||
});
|
||||
server.registerTransport(httpTransport);
|
||||
}
|
||||
|
||||
// Start the server
|
||||
await server.start();
|
||||
logger.info('MCP Server started successfully');
|
||||
|
||||
// Handle shutdown
|
||||
const shutdown = async () => {
|
||||
logger.info('Shutting down MCP Server...');
|
||||
try {
|
||||
await server.shutdown();
|
||||
logger.info('MCP Server shutdown complete');
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
logger.error('Error during shutdown:', error);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
// Register shutdown handlers
|
||||
process.on('SIGINT', shutdown);
|
||||
process.on('SIGTERM', shutdown);
|
||||
}
|
||||
|
||||
// Run the main function
|
||||
main().catch(error => {
|
||||
logger.error('Error starting MCP Server:', error);
|
||||
process.exit(1);
|
||||
});
105
src/mcp/BaseTool.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
/**
|
||||
* Base Tool Implementation for MCP
|
||||
*
|
||||
* This base class provides the foundation for all tools in the MCP implementation,
|
||||
* with typed parameters, validation, and error handling.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { ToolDefinition, ToolMetadata, MCPResponseStream } from './types.js';
|
||||
|
||||
/**
|
||||
* Configuration options for creating a tool
|
||||
*/
|
||||
export interface ToolOptions<P = unknown> {
|
||||
name: string;
|
||||
description: string;
|
||||
version: string;
|
||||
parameters?: z.ZodType<P>;
|
||||
metadata?: ToolMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Base class for all MCP tools
|
||||
*
|
||||
* Provides:
|
||||
* - Parameter validation with Zod
|
||||
* - Error handling
|
||||
* - Streaming support
|
||||
* - Type safety
|
||||
*/
|
||||
export abstract class BaseTool<P = unknown, R = unknown> implements ToolDefinition {
|
||||
public readonly name: string;
|
||||
public readonly description: string;
|
||||
public readonly parameters?: z.ZodType<P>;
|
||||
public readonly metadata: ToolMetadata;
|
||||
|
||||
/**
|
||||
* Create a new tool
|
||||
*/
|
||||
constructor(options: ToolOptions<P>) {
|
||||
this.name = options.name;
|
||||
this.description = options.description;
|
||||
this.parameters = options.parameters;
|
||||
this.metadata = {
|
||||
version: options.version,
|
||||
category: options.metadata?.category || 'general',
|
||||
tags: options.metadata?.tags || [],
|
||||
examples: options.metadata?.examples || [],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the tool with the given parameters
|
||||
*
|
||||
* @param params The validated parameters for the tool
|
||||
* @param stream Optional stream for sending partial results
|
||||
* @returns The result of the tool execution
|
||||
*/
|
||||
abstract execute(params: P, stream?: MCPResponseStream): Promise<R>;
|
||||
|
||||
/**
|
||||
* Get the parameter schema as JSON schema
|
||||
*/
|
||||
public getParameterSchema(): Record<string, unknown> | undefined {
|
||||
if (!this.parameters) return undefined;
|
||||
return this.parameters.isOptional()
|
||||
? { type: 'object', properties: {} }
|
||||
: this.parameters.shape;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tool definition for registration
|
||||
*/
|
||||
public getDefinition(): ToolDefinition {
|
||||
return {
|
||||
name: this.name,
|
||||
description: this.description,
|
||||
parameters: this.parameters,
|
||||
metadata: this.metadata
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate parameters against the schema
|
||||
*
|
||||
* @param params Parameters to validate
|
||||
* @returns Validated parameters
|
||||
* @throws Error if validation fails
|
||||
*/
|
||||
protected validateParams(params: unknown): P {
|
||||
if (!this.parameters) {
|
||||
return {} as P;
|
||||
}
|
||||
|
||||
try {
|
||||
return this.parameters.parse(params);
|
||||
} catch (error) {
|
||||
if (error instanceof z.ZodError) {
|
||||
const issues = error.issues.map(issue => `${issue.path.join('.')}: ${issue.message}`).join(', ');
|
||||
throw new Error(`Parameter validation failed: ${issues}`);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
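To make the contract concrete, a hypothetical tool built on this base class might look like the following. `EchoTool` is purely illustrative and not part of the repository; only the `BaseTool` API it relies on comes from the file above.

```typescript
import { z } from 'zod';
import { BaseTool } from './BaseTool.js';

// Illustrative example only; not part of the codebase.
const EchoParams = z.object({
  message: z.string().min(1),
  uppercase: z.boolean().optional(),
});

type EchoParamsType = z.infer<typeof EchoParams>;

export class EchoTool extends BaseTool<EchoParamsType, { echoed: string }> {
  constructor() {
    super({
      name: 'echo',
      description: 'Echo a message back to the caller',
      version: '1.0.0',
      parameters: EchoParams,
    });
  }

  async execute(params: EchoParamsType): Promise<{ echoed: string }> {
    // Re-validate input against the Zod schema declared above
    const { message, uppercase } = this.validateParams(params);
    return { echoed: uppercase ? message.toUpperCase() : message };
  }
}
```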
453
src/mcp/MCPServer.ts
Normal file
@@ -0,0 +1,453 @@
|
||||
/**
|
||||
* MCPServer.ts
|
||||
*
|
||||
* Core implementation of the Model Context Protocol server.
|
||||
* This class manages tool registration, execution, and resource handling
|
||||
* while providing integration with various transport layers.
|
||||
*/
|
||||
|
||||
import { EventEmitter } from "events";
|
||||
import { z } from "zod";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { logger } from "../utils/logger.js";
|
||||
|
||||
// Error code enum to break circular dependency
|
||||
export enum MCPErrorCode {
|
||||
// Standard JSON-RPC 2.0 error codes
|
||||
PARSE_ERROR = -32700,
|
||||
INVALID_REQUEST = -32600,
|
||||
METHOD_NOT_FOUND = -32601,
|
||||
INVALID_PARAMS = -32602,
|
||||
INTERNAL_ERROR = -32603,
|
||||
|
||||
// Custom MCP error codes
|
||||
TOOL_EXECUTION_ERROR = -32000,
|
||||
VALIDATION_ERROR = -32001,
|
||||
RESOURCE_NOT_FOUND = -32002,
|
||||
RESOURCE_BUSY = -32003,
|
||||
TIMEOUT = -32004,
|
||||
CANCELED = -32005,
|
||||
AUTHENTICATION_ERROR = -32006,
|
||||
AUTHORIZATION_ERROR = -32007,
|
||||
TRANSPORT_ERROR = -32008,
|
||||
STREAMING_ERROR = -32009
|
||||
}
|
||||
|
||||
// Server events enum to break circular dependency
|
||||
export enum MCPServerEvents {
|
||||
STARTING = "starting",
|
||||
STARTED = "started",
|
||||
SHUTTING_DOWN = "shuttingDown",
|
||||
SHUTDOWN = "shutdown",
|
||||
REQUEST_RECEIVED = "requestReceived",
|
||||
RESPONSE_SENT = "responseSent",
|
||||
RESPONSE_ERROR = "responseError",
|
||||
TOOL_REGISTERED = "toolRegistered",
|
||||
TRANSPORT_REGISTERED = "transportRegistered",
|
||||
CONFIG_UPDATED = "configUpdated"
|
||||
}
|
||||
|
||||
// Forward declarations to break circular dependency
|
||||
import type {
|
||||
ToolDefinition,
|
||||
MCPMiddleware,
|
||||
MCPRequest,
|
||||
MCPResponse,
|
||||
MCPContext,
|
||||
TransportLayer,
|
||||
MCPConfig,
|
||||
ResourceManager
|
||||
} from "./types.js";
|
||||
|
||||
/**
|
||||
* Main Model Context Protocol server class
|
||||
*/
|
||||
export class MCPServer extends EventEmitter {
|
||||
private static instance: MCPServer;
|
||||
private tools: Map<string, ToolDefinition> = new Map();
|
||||
private middlewares: MCPMiddleware[] = [];
|
||||
private transports: TransportLayer[] = [];
|
||||
private resourceManager: ResourceManager;
|
||||
private config: MCPConfig;
|
||||
private resources: Map<string, Map<string, any>> = new Map();
|
||||
|
||||
/**
|
||||
* Private constructor for singleton pattern
|
||||
*/
|
||||
private constructor(config: Partial<MCPConfig> = {}) {
|
||||
super();
|
||||
this.config = {
|
||||
maxRetries: 3,
|
||||
retryDelay: 1000,
|
||||
executionTimeout: 30000,
|
||||
streamingEnabled: true,
|
||||
maxPayloadSize: 10 * 1024 * 1024, // 10MB
|
||||
...config
|
||||
};
|
||||
|
||||
this.resourceManager = {
|
||||
acquire: this.acquireResource.bind(this),
|
||||
release: this.releaseResource.bind(this),
|
||||
list: this.listResources.bind(this)
|
||||
};
|
||||
|
||||
// Initialize with default middlewares
|
||||
this.use(this.validationMiddleware.bind(this));
|
||||
this.use(this.errorHandlingMiddleware.bind(this));
|
||||
|
||||
logger.info("MCP Server initialized");
|
||||
}
|
||||
|
||||
/**
|
||||
* Get singleton instance
|
||||
*/
|
||||
public static getInstance(config?: Partial<MCPConfig>): MCPServer {
|
||||
if (!MCPServer.instance) {
|
||||
MCPServer.instance = new MCPServer(config);
|
||||
} else if (config) {
|
||||
MCPServer.instance.configure(config);
|
||||
}
|
||||
return MCPServer.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update server configuration
|
||||
*/
|
||||
public configure(config: Partial<MCPConfig>): void {
|
||||
this.config = {
|
||||
...this.config,
|
||||
...config
|
||||
};
|
||||
logger.debug("MCP Server configuration updated", { config });
|
||||
this.emit(MCPServerEvents.CONFIG_UPDATED, this.config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a new tool with the server
|
||||
*/
|
||||
public registerTool(tool: ToolDefinition): void {
|
||||
if (this.tools.has(tool.name)) {
|
||||
logger.warn(`Tool '${tool.name}' is already registered. Overwriting.`);
|
||||
}
|
||||
|
||||
this.tools.set(tool.name, tool);
|
||||
logger.debug(`Tool '${tool.name}' registered`);
|
||||
this.emit(MCPServerEvents.TOOL_REGISTERED, tool);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register multiple tools at once
|
||||
*/
|
||||
public registerTools(tools: ToolDefinition[]): void {
|
||||
tools.forEach(tool => this.registerTool(tool));
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a tool by name
|
||||
*/
|
||||
public getTool(name: string): ToolDefinition | undefined {
|
||||
return this.tools.get(name);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered tools
|
||||
*/
|
||||
public getAllTools(): ToolDefinition[] {
|
||||
return Array.from(this.tools.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Register a transport layer
|
||||
*/
|
||||
public registerTransport(transport: TransportLayer): void {
|
||||
this.transports.push(transport);
|
||||
transport.initialize(this.handleRequest.bind(this));
|
||||
logger.debug(`Transport '${transport.name}' registered`);
|
||||
this.emit(MCPServerEvents.TRANSPORT_REGISTERED, transport);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a middleware to the pipeline
|
||||
*/
|
||||
public use(middleware: MCPMiddleware): void {
|
||||
this.middlewares.push(middleware);
|
||||
logger.debug("Middleware added");
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle an incoming request through the middleware pipeline
|
||||
*/
|
||||
public async handleRequest(request: MCPRequest): Promise<MCPResponse> {
|
||||
const context: MCPContext = {
|
||||
requestId: request.id ?? uuidv4(),
|
||||
startTime: Date.now(),
|
||||
resourceManager: this.resourceManager,
|
||||
tools: this.tools,
|
||||
config: this.config,
|
||||
logger: logger.child({ requestId: request.id }),
|
||||
server: this,
|
||||
state: new Map()
|
||||
};
|
||||
|
||||
logger.debug(`Handling request: ${context.requestId}`, { method: request.method });
|
||||
this.emit(MCPServerEvents.REQUEST_RECEIVED, request, context);
|
||||
|
||||
let index = 0;
|
||||
const next = async (): Promise<MCPResponse> => {
|
||||
if (index < this.middlewares.length) {
|
||||
const middleware = this.middlewares[index++];
|
||||
return middleware(request, context, next);
|
||||
} else {
|
||||
return this.executeRequest(request, context);
|
||||
}
|
||||
};
|
||||
|
||||
try {
|
||||
const response = await next();
|
||||
this.emit(MCPServerEvents.RESPONSE_SENT, response, context);
|
||||
return response;
|
||||
} catch (error) {
|
||||
const errorResponse: MCPResponse = {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: error instanceof Error ? error.message : String(error)
|
||||
}
|
||||
};
|
||||
this.emit(MCPServerEvents.RESPONSE_ERROR, errorResponse, context);
|
||||
return errorResponse;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a tool request after middleware processing
|
||||
*/
|
||||
private async executeRequest(request: MCPRequest, context: MCPContext): Promise<MCPResponse> {
|
||||
const { method, params = {} } = request;
|
||||
|
||||
// Special case for internal context retrieval (used by transports for initialization)
|
||||
if (method === "_internal_getContext") {
|
||||
return {
|
||||
id: request.id,
|
||||
result: {
|
||||
context: context,
|
||||
tools: Array.from(this.tools.values()).map(tool => ({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
metadata: tool.metadata
|
||||
}))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
const tool = this.tools.get(method);
|
||||
if (!tool) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.METHOD_NOT_FOUND,
|
||||
message: `Method not found: ${method}`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await tool.execute(params, context);
|
||||
return {
|
||||
id: request.id,
|
||||
result
|
||||
};
|
||||
} catch (error) {
|
||||
logger.error(`Error executing tool ${method}:`, error);
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.TOOL_EXECUTION_ERROR,
|
||||
message: error instanceof Error ? error.message : String(error)
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validation middleware
|
||||
*/
|
||||
private async validationMiddleware(
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> {
|
||||
const { method, params = {} } = request;
|
||||
|
||||
const tool = this.tools.get(method);
|
||||
if (!tool) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.METHOD_NOT_FOUND,
|
||||
message: `Method not found: ${method}`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (tool.parameters && params) {
|
||||
try {
|
||||
// Validate parameters with the schema
|
||||
const validParams = tool.parameters.parse(params);
|
||||
// Update with validated params (which may include defaults)
|
||||
request.params = validParams;
|
||||
} catch (validationError) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.INVALID_PARAMS,
|
||||
message: "Invalid parameters",
|
||||
data: validationError instanceof Error ? validationError.message : String(validationError)
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Error handling middleware
|
||||
*/
|
||||
private async errorHandlingMiddleware(
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> {
|
||||
try {
|
||||
return await next();
|
||||
} catch (error) {
|
||||
logger.error(`Uncaught error in request pipeline:`, error);
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: error instanceof Error ? error.message : "An unknown error occurred",
|
||||
data: error instanceof Error ? { name: error.name, stack: error.stack } : undefined
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Resource acquisition
|
||||
*/
|
||||
private async acquireResource(resourceType: string, resourceId: string, context: MCPContext): Promise<any> {
|
||||
logger.debug(`Acquiring resource: ${resourceType}/${resourceId}`);
|
||||
|
||||
// Initialize resource type map if not exists
|
||||
if (!this.resources.has(resourceType)) {
|
||||
this.resources.set(resourceType, new Map());
|
||||
}
|
||||
|
||||
const typeResources = this.resources.get(resourceType);
|
||||
|
||||
// Create resource if it doesn't exist
|
||||
if (!typeResources.has(resourceId)) {
|
||||
// Create a placeholder for the resource
|
||||
const resourceData = {
|
||||
id: resourceId,
|
||||
type: resourceType,
|
||||
createdAt: Date.now(),
|
||||
data: {}
|
||||
};
|
||||
|
||||
// Store the resource
|
||||
typeResources.set(resourceId, resourceData);
|
||||
|
||||
// Log resource creation
|
||||
await Promise.resolve(); // Add await to satisfy linter
|
||||
logger.debug(`Created new resource: ${resourceType}/${resourceId}`);
|
||||
|
||||
return resourceData;
|
||||
}
|
||||
|
||||
// Return existing resource
|
||||
return typeResources.get(resourceId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Resource release
|
||||
*/
|
||||
private async releaseResource(resourceType: string, resourceId: string, context: MCPContext): Promise<void> {
|
||||
logger.debug(`Releasing resource: ${resourceType}/${resourceId}`);
|
||||
|
||||
// Check if type exists
|
||||
if (!this.resources.has(resourceType)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const typeResources = this.resources.get(resourceType);
|
||||
|
||||
// Remove resource if it exists
|
||||
if (typeResources.has(resourceId)) {
|
||||
await Promise.resolve(); // Add await to satisfy linter
|
||||
typeResources.delete(resourceId);
|
||||
logger.debug(`Released resource: ${resourceType}/${resourceId}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List available resources
|
||||
*/
|
||||
private async listResources(context: MCPContext, resourceType?: string): Promise<string[]> {
|
||||
if (resourceType) {
|
||||
logger.debug(`Listing resources of type ${resourceType}`);
|
||||
|
||||
if (!this.resources.has(resourceType)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
await Promise.resolve(); // Add await to satisfy linter
|
||||
return Array.from(this.resources.get(resourceType).keys());
|
||||
} else {
|
||||
logger.debug('Listing all resource types');
|
||||
await Promise.resolve(); // Add await to satisfy linter
|
||||
return Array.from(this.resources.keys());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the server
|
||||
*/
|
||||
public async start(): Promise<void> {
|
||||
logger.info("Starting MCP Server");
|
||||
this.emit(MCPServerEvents.STARTING);
|
||||
|
||||
// Start all transports
|
||||
for (const transport of this.transports) {
|
||||
await transport.start();
|
||||
}
|
||||
|
||||
this.emit(MCPServerEvents.STARTED);
|
||||
logger.info("MCP Server started");
|
||||
}
|
||||
|
||||
/**
|
||||
* Gracefully shut down the server
|
||||
*/
|
||||
public async shutdown(): Promise<void> {
|
||||
logger.info("Shutting down MCP Server");
|
||||
this.emit(MCPServerEvents.SHUTTING_DOWN);
|
||||
|
||||
// Stop all transports
|
||||
for (const transport of this.transports) {
|
||||
await transport.stop();
|
||||
}
|
||||
|
||||
// Clear resources
|
||||
this.tools.clear();
|
||||
this.middlewares = [];
|
||||
this.transports = [];
|
||||
this.resources.clear();
|
||||
|
||||
this.emit(MCPServerEvents.SHUTDOWN);
|
||||
this.removeAllListeners();
|
||||
logger.info("MCP Server shut down");
|
||||
}
|
||||
}
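For illustration, a minimal lifecycle sketch for the server shown above (not part of the diff), using the getInstance/registerTransport helpers that appear later in src/mcp/index.ts:

```typescript
import { MCPServer } from "./MCPServer.js";
import { StdioTransport } from "./transports/stdio.transport.js";

// Sketch only: wire one transport into the singleton server, then run the lifecycle.
const server = MCPServer.getInstance();
server.registerTransport(new StdioTransport({ silent: true }));

await server.start();     // emits STARTING, starts every registered transport, emits STARTED
// ... requests flow through the middleware pipeline while the process is alive ...
await server.shutdown();  // stops transports, clears tools/middlewares/resources, emits SHUTDOWN
```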
153
src/mcp/index.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
/**
|
||||
* MCP - Model Context Protocol Implementation
|
||||
*
|
||||
* This is the main entry point for the MCP implementation.
|
||||
* It exports all the components needed to use the MCP.
|
||||
*/
|
||||
|
||||
// Core MCP components
|
||||
export * from './MCPServer.js';
|
||||
export * from './types.js';
|
||||
export * from './BaseTool.js';
|
||||
|
||||
// Middleware
|
||||
export * from './middleware/index.js';
|
||||
|
||||
// Transports
|
||||
export * from './transports/stdio.transport.js';
|
||||
export * from './transports/http.transport.js';
|
||||
|
||||
// Utilities for AI assistants
|
||||
export * from './utils/claude.js';
|
||||
export * from './utils/cursor.js';
|
||||
export * from './utils/error.js';
|
||||
|
||||
// Helper function to create Claude-compatible tool definitions
|
||||
export function createClaudeToolDefinitions(tools: any[]): any[] {
|
||||
return tools.map(tool => {
|
||||
// Placeholder schema only: the Zod schema is not converted here.
// For a real Zod -> JSON Schema conversion use createClaudeToolDefinitions from './utils/claude.js'.
const parameters = {
type: 'object',
properties: {},
required: []
};
|
||||
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
// Helper function to create Cursor-compatible tool definitions
|
||||
export function createCursorToolDefinitions(tools: any[]): any[] {
|
||||
return tools.map(tool => {
|
||||
// Convert to Cursor format
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: {}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Model Context Protocol (MCP) Module
|
||||
*
|
||||
* This module provides the core MCP server implementation along with
|
||||
* tools, transports, and utilities for integrating with Claude and Cursor.
|
||||
*/
|
||||
|
||||
// Export server implementation
|
||||
export { MCPServer } from "./MCPServer.js";
|
||||
|
||||
// Export type definitions
|
||||
export * from "./types.js";
|
||||
|
||||
// Export transport layers
|
||||
export { StdioTransport } from "./transports/stdio.transport.js";
|
||||
|
||||
// Re-export tools base class
|
||||
export { BaseTool } from "../tools/base-tool.js";
|
||||
|
||||
// Re-export middleware
|
||||
export * from "./middleware/index.js";
|
||||
|
||||
// Import types for proper type definitions
|
||||
import { MCPServer } from "./MCPServer.js";
|
||||
import { StdioTransport } from "./transports/stdio.transport.js";
|
||||
import { ToolDefinition } from "./types.js";
|
||||
|
||||
/**
|
||||
* Utility function to create Claude-compatible function definitions
|
||||
*/
|
||||
export function createClaudeFunctions(tools: ToolDefinition[]): any[] {
|
||||
return tools.map(tool => {
|
||||
// If the tool has a toSchemaObject method, use it
|
||||
if ('toSchemaObject' in tool && typeof tool.toSchemaObject === 'function') {
|
||||
return tool.toSchemaObject();
|
||||
}
|
||||
|
||||
// Otherwise, manually convert the tool to a Claude function
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: (tool as any).parameters?.properties || {},
|
||||
required: (tool as any).parameters?.required || []
|
||||
}
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Utility function to create Cursor-compatible tool definitions
|
||||
*/
|
||||
export function createCursorTools(tools: ToolDefinition[]): any[] {
|
||||
return tools.map(tool => ({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: Object.entries((tool as any).parameters?.properties || {}).reduce((acc, [key, value]) => {
|
||||
const param = value as any;
|
||||
acc[key] = {
|
||||
type: param.type || 'string',
|
||||
description: param.description || '',
|
||||
required: ((tool as any).parameters?.required || []).includes(key)
|
||||
};
|
||||
return acc;
|
||||
}, {} as Record<string, any>)
|
||||
}));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a standalone MCP server with stdio transport
|
||||
*/
|
||||
export function createStdioServer(options: {
|
||||
silent?: boolean;
|
||||
debug?: boolean;
|
||||
tools?: ToolDefinition[];
|
||||
} = {}): { server: MCPServer; transport: StdioTransport } {
|
||||
// Create server instance
|
||||
const server = MCPServer.getInstance();
|
||||
|
||||
// Create and register stdio transport
|
||||
const transport = new StdioTransport({
|
||||
silent: options.silent,
|
||||
debug: options.debug
|
||||
});
|
||||
|
||||
server.registerTransport(transport);
|
||||
|
||||
// Register tools if provided
|
||||
if (options.tools && Array.isArray(options.tools)) {
|
||||
server.registerTools(options.tools);
|
||||
}
|
||||
|
||||
return { server, transport };
|
||||
}
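A usage sketch for createStdioServer; the echo tool, its schema, and its return value are invented for illustration:

```typescript
import { z } from "zod";
import { createStdioServer } from "./index.js";

// Hypothetical echo tool; name, schema and result shape are illustrative only.
const { server } = createStdioServer({
  silent: true,
  tools: [{
    name: "echo",
    description: "Echo back the provided message",
    parameters: z.object({ message: z.string() }),
    execute: async (params) => ({ echoed: params.message }),
  }],
});

await server.start();
```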
172
src/mcp/middleware/index.ts
Normal file
@@ -0,0 +1,172 @@
|
||||
/**
|
||||
* MCP Middleware System
|
||||
*
|
||||
* This module provides middleware functionality for the MCP server,
|
||||
* allowing for request/response processing pipelines.
|
||||
*/
|
||||
|
||||
import { MCPMiddleware, MCPRequest, MCPResponse, MCPContext, MCPErrorCode } from "../types.js";
|
||||
import { logger } from "../../utils/logger.js";
|
||||
|
||||
/**
|
||||
* Middleware for validating requests against JSON Schema
|
||||
*/
|
||||
export const validationMiddleware: MCPMiddleware = async (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> => {
|
||||
const { method } = request;
|
||||
|
||||
const tool = context.tools.get(method);
|
||||
if (!tool) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.METHOD_NOT_FOUND,
|
||||
message: `Method not found: ${method}`
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if (tool.parameters && request.params) {
|
||||
try {
|
||||
// Zod validation happens here
|
||||
const validatedParams = tool.parameters.parse(request.params);
|
||||
request.params = validatedParams;
|
||||
} catch (error) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.INVALID_PARAMS,
|
||||
message: "Invalid parameters",
|
||||
data: error instanceof Error ? error.message : String(error)
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return next();
|
||||
};
|
||||
|
||||
/**
|
||||
* Middleware for handling authentication
|
||||
*/
|
||||
export const authMiddleware = (authKey: string): MCPMiddleware => {
|
||||
return async (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> => {
|
||||
// Check for authentication in params
|
||||
const authToken = (request.params)?.auth_token;
|
||||
|
||||
if (!authToken || authToken !== authKey) {
|
||||
return {
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.AUTHENTICATION_ERROR,
|
||||
message: "Authentication failed"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Remove auth token from params to keep them clean
|
||||
if (request.params && typeof request.params === 'object') {
|
||||
const { auth_token, ...cleanParams } = request.params;
|
||||
request.params = cleanParams;
|
||||
}
|
||||
|
||||
return next();
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Middleware for logging requests and responses
|
||||
*/
|
||||
export const loggingMiddleware: MCPMiddleware = async (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> => {
|
||||
const startTime = Date.now();
|
||||
logger.debug(`MCP Request: ${request.method}`, {
|
||||
id: request.id,
|
||||
method: request.method
|
||||
});
|
||||
|
||||
try {
|
||||
const response = await next();
|
||||
|
||||
const duration = Date.now() - startTime;
|
||||
logger.debug(`MCP Response: ${request.method}`, {
|
||||
id: request.id,
|
||||
method: request.method,
|
||||
success: !response.error,
|
||||
duration
|
||||
});
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime;
|
||||
logger.error(`MCP Error: ${request.method}`, {
|
||||
id: request.id,
|
||||
method: request.method,
|
||||
error,
|
||||
duration
|
||||
});
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Middleware for handling timeouts
|
||||
*/
|
||||
export const timeoutMiddleware = (timeoutMs: number): MCPMiddleware => {
|
||||
return async (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> => {
|
||||
return Promise.race([
|
||||
next(),
|
||||
new Promise<MCPResponse>((resolve) => {
|
||||
setTimeout(() => {
|
||||
resolve({
|
||||
id: request.id,
|
||||
error: {
|
||||
code: MCPErrorCode.TIMEOUT,
|
||||
message: `Request timed out after ${timeoutMs}ms`
|
||||
}
|
||||
});
|
||||
}, timeoutMs);
|
||||
})
|
||||
]);
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Utility to combine multiple middlewares
|
||||
*/
|
||||
export function combineMiddlewares(middlewares: MCPMiddleware[]): MCPMiddleware {
|
||||
return async (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
): Promise<MCPResponse> => {
|
||||
// Create a function that runs through all middlewares
|
||||
let index = 0;
|
||||
|
||||
const runMiddleware = async (): Promise<MCPResponse> => {
|
||||
if (index < middlewares.length) {
|
||||
const middleware = middlewares[index++];
|
||||
return middleware(request, context, runMiddleware);
|
||||
} else {
|
||||
return next();
|
||||
}
|
||||
};
|
||||
|
||||
return runMiddleware();
|
||||
};
|
||||
}
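A short sketch of composing these middlewares with combineMiddlewares; how the combined middleware is registered on the server is an assumption, since that wiring lives in MCPServer.ts:

```typescript
import {
  combineMiddlewares,
  loggingMiddleware,
  timeoutMiddleware,
  validationMiddleware,
} from "./middleware/index.js";

// Outermost first: logging wraps the timeout, which bounds validation + tool execution.
const pipeline = combineMiddlewares([
  loggingMiddleware,
  timeoutMiddleware(30_000),
  validationMiddleware,
]);

// Registration hook name is assumed here; see MCPServer.ts for the actual API.
// server.use(pipeline);
```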
42
src/mcp/transport.ts
Normal file
@@ -0,0 +1,42 @@
/**
 * Base Transport for MCP
 *
 * This module provides a base class for all transport implementations.
 */

import { TransportLayer, MCPRequest, MCPResponse, MCPStreamPart, MCPNotification } from "./types.js";

/**
 * Abstract base class for all transports
 */
export abstract class BaseTransport implements TransportLayer {
  public name: string = "base";
  protected handler: ((request: MCPRequest) => Promise<MCPResponse>) | null = null;

  /**
   * Initialize the transport with a request handler
   */
  public initialize(handler: (request: MCPRequest) => Promise<MCPResponse>): void {
    this.handler = handler;
  }

  /**
   * Start the transport
   */
  public abstract start(): Promise<void>;

  /**
   * Stop the transport
   */
  public abstract stop(): Promise<void>;

  /**
   * Send a notification to a client
   */
  public sendNotification?(notification: MCPNotification): void;

  /**
   * Send a streaming response part
   */
  public sendStreamPart?(streamPart: MCPStreamPart): void;
}
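A minimal sketch of extending BaseTransport, e.g. an in-memory transport for tests; the dispatch helper is an invention for illustration, not part of the diff:

```typescript
import { BaseTransport } from "./transport.js";
import type { MCPRequest, MCPResponse } from "./types.js";

// Hypothetical in-memory transport, handy for unit tests.
export class InMemoryTransport extends BaseTransport {
  public name = "memory";

  public async start(): Promise<void> { /* nothing to open */ }
  public async stop(): Promise<void> { /* nothing to close */ }

  // Feed a request straight into the server's handler and return its response.
  public async dispatch(request: MCPRequest): Promise<MCPResponse> {
    if (!this.handler) throw new Error("Transport not initialized");
    return this.handler(request);
  }
}
```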
426
src/mcp/transports/http.transport.ts
Normal file
@@ -0,0 +1,426 @@
|
||||
/**
|
||||
* HTTP Transport for MCP
|
||||
*
|
||||
* This module implements a JSON-RPC 2.0 transport layer over HTTP/HTTPS
|
||||
* for the Model Context Protocol. It supports both traditional request/response
|
||||
* patterns as well as streaming responses via Server-Sent Events (SSE).
|
||||
*/
|
||||
|
||||
import { Server as HttpServer } from "http";
|
||||
import express, { Express, Request, Response, NextFunction } from "express";
|
||||
// Using a direct import now that we have the types
|
||||
import cors from "cors";
|
||||
import { TransportLayer, MCPRequest, MCPResponse, MCPStreamPart, MCPNotification, MCPErrorCode } from "../types.js";
|
||||
import { logger } from "../../utils/logger.js";
|
||||
import { EventEmitter } from "events";
|
||||
|
||||
type ServerSentEventsClient = {
|
||||
id: string;
|
||||
response: Response;
|
||||
};
|
||||
|
||||
/**
|
||||
* Implementation of TransportLayer using HTTP/Express
|
||||
*/
|
||||
export class HttpTransport implements TransportLayer {
|
||||
public name = "http";
|
||||
private handler: ((request: MCPRequest) => Promise<MCPResponse>) | null = null;
|
||||
private app: Express;
|
||||
private server: HttpServer | null = null;
|
||||
private sseClients: Map<string, ServerSentEventsClient>;
|
||||
private events: EventEmitter;
|
||||
private initialized = false;
|
||||
private port: number;
|
||||
private corsOrigin: string | string[];
|
||||
private apiPrefix: string;
|
||||
private debug: boolean;
|
||||
|
||||
/**
|
||||
* Constructor for HttpTransport
|
||||
*/
|
||||
constructor(options: {
|
||||
port?: number;
|
||||
corsOrigin?: string | string[];
|
||||
apiPrefix?: string;
|
||||
debug?: boolean;
|
||||
} = {}) {
|
||||
this.port = options.port ?? (process.env.PORT ? parseInt(process.env.PORT, 10) : 3000);
|
||||
this.corsOrigin = options.corsOrigin ?? (process.env.CORS_ORIGIN || '*');
|
||||
this.apiPrefix = options.apiPrefix ?? '/api';
|
||||
this.debug = options.debug ?? (process.env.DEBUG_HTTP === "true");
|
||||
this.app = express();
|
||||
this.sseClients = new Map();
|
||||
this.events = new EventEmitter();
|
||||
|
||||
// Configure max event listeners
|
||||
this.events.setMaxListeners(100);
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the transport with a request handler
|
||||
*/
|
||||
public initialize(handler: (request: MCPRequest) => Promise<MCPResponse>): void {
|
||||
if (this.initialized) {
|
||||
throw new Error("HttpTransport already initialized");
|
||||
}
|
||||
|
||||
this.handler = handler;
|
||||
this.initialized = true;
|
||||
|
||||
// Setup middleware
|
||||
this.setupMiddleware();
|
||||
|
||||
// Setup routes
|
||||
this.setupRoutes();
|
||||
|
||||
logger.info("HTTP transport initialized");
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Express middleware
|
||||
*/
|
||||
private setupMiddleware(): void {
|
||||
// JSON body parser
|
||||
this.app.use(express.json({ limit: '1mb' }));
|
||||
|
||||
// CORS configuration
|
||||
// Using the imported cors middleware
|
||||
try {
|
||||
this.app.use(cors({
|
||||
origin: this.corsOrigin,
|
||||
methods: ['GET', 'POST', 'OPTIONS'],
|
||||
allowedHeaders: ['Content-Type', 'Authorization'],
|
||||
credentials: true
|
||||
}));
|
||||
} catch (err) {
|
||||
logger.warn(`CORS middleware not available: ${String(err)}`);
|
||||
}
|
||||
|
||||
// Request logging
|
||||
if (this.debug) {
|
||||
this.app.use((req, res, next) => {
|
||||
logger.debug(`HTTP ${req.method} ${req.url}`);
|
||||
next();
|
||||
});
|
||||
}
|
||||
|
||||
// Error handling middleware
|
||||
this.app.use((err: Error, req: Request, res: Response, next: NextFunction) => {
|
||||
logger.error(`Express error: ${err.message}`);
|
||||
res.status(500).json({
|
||||
jsonrpc: "2.0",
|
||||
id: null,
|
||||
error: {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: "Internal server error",
|
||||
data: this.debug ? { stack: err.stack } : undefined
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup Express routes
|
||||
*/
|
||||
private setupRoutes(): void {
|
||||
// Health check endpoint
|
||||
this.app.get('/health', (req: Request, res: Response) => {
|
||||
res.status(200).json({
|
||||
status: 'ok',
|
||||
transport: 'http',
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
});
|
||||
|
||||
// Server info endpoint
|
||||
this.app.get(`${this.apiPrefix}/info`, (req: Request, res: Response) => {
|
||||
res.status(200).json({
|
||||
jsonrpc: "2.0",
|
||||
result: {
|
||||
name: "Model Context Protocol Server",
|
||||
version: "1.0.0",
|
||||
transport: "http",
|
||||
protocol: "json-rpc-2.0",
|
||||
features: ["streaming"],
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// SSE stream endpoint
|
||||
this.app.get(`${this.apiPrefix}/stream`, (req: Request, res: Response) => {
|
||||
const clientId = (req.query.clientId as string) || `client-${Date.now()}-${Math.random().toString(36).substring(2, 9)}`;
|
||||
|
||||
// Set headers for SSE
|
||||
res.setHeader('Content-Type', 'text/event-stream');
|
||||
res.setHeader('Cache-Control', 'no-cache');
|
||||
res.setHeader('Connection', 'keep-alive');
|
||||
|
||||
// Store the client
|
||||
this.sseClients.set(clientId, { id: clientId, response: res });
|
||||
|
||||
// Send initial connection established event
|
||||
res.write(`event: connected\ndata: ${JSON.stringify({ clientId })}\n\n`);
|
||||
|
||||
// Client disconnection handler
|
||||
req.on('close', () => {
|
||||
if (this.debug) {
|
||||
logger.debug(`SSE client disconnected: ${clientId}`);
|
||||
}
|
||||
this.sseClients.delete(clientId);
|
||||
});
|
||||
|
||||
if (this.debug) {
|
||||
logger.debug(`SSE client connected: ${clientId}`);
|
||||
}
|
||||
});
|
||||
|
||||
// JSON-RPC endpoint
|
||||
this.app.post(`${this.apiPrefix}/jsonrpc`, (req: Request, res: Response) => {
|
||||
void this.handleJsonRpcRequest(req, res);
|
||||
});
|
||||
|
||||
// Default 404 handler
|
||||
this.app.use((req: Request, res: Response) => {
|
||||
res.status(404).json({
|
||||
jsonrpc: "2.0",
|
||||
id: null,
|
||||
error: {
|
||||
code: MCPErrorCode.METHOD_NOT_FOUND,
|
||||
message: "Not found"
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a JSON-RPC request from HTTP
|
||||
*/
|
||||
private async handleJsonRpcRequest(req: Request, res: Response): Promise<void> {
|
||||
if (!this.handler) {
|
||||
res.status(500).json({
|
||||
jsonrpc: "2.0",
|
||||
id: req.body.id || null,
|
||||
error: {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: "Transport not properly initialized"
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Validate it's JSON-RPC 2.0
|
||||
if (!req.body.jsonrpc || req.body.jsonrpc !== "2.0") {
|
||||
res.status(400).json({
|
||||
jsonrpc: "2.0",
|
||||
id: req.body.id || null,
|
||||
error: {
|
||||
code: MCPErrorCode.INVALID_REQUEST,
|
||||
message: "Invalid JSON-RPC 2.0 request: missing or invalid jsonrpc version"
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Check for batch requests
|
||||
if (Array.isArray(req.body)) {
|
||||
res.status(501).json({
|
||||
jsonrpc: "2.0",
|
||||
id: null,
|
||||
error: {
|
||||
code: MCPErrorCode.METHOD_NOT_FOUND,
|
||||
message: "Batch requests are not supported"
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle request
|
||||
const request: MCPRequest = {
|
||||
jsonrpc: req.body.jsonrpc,
|
||||
id: req.body.id ?? null,
|
||||
method: req.body.method,
|
||||
params: req.body.params
|
||||
};
|
||||
|
||||
// Get streaming preference from query params
|
||||
const useStreaming = req.query.stream === 'true';
|
||||
|
||||
// Extract client ID if provided (for streaming)
|
||||
const clientId = (req.query.clientId as string) || (req.body.clientId as string);
|
||||
|
||||
// Check if this is a streaming request and client is connected
|
||||
if (useStreaming && clientId && this.sseClients.has(clientId)) {
|
||||
// Add streaming metadata to the request
|
||||
request.streaming = {
|
||||
enabled: true,
|
||||
clientId
|
||||
};
|
||||
}
|
||||
|
||||
// Process the request
|
||||
const response = await this.handler(request);
|
||||
|
||||
// Return the response
|
||||
res.status(200).json({
|
||||
jsonrpc: "2.0",
|
||||
...response
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error(`Error handling JSON-RPC request: ${String(error)}`);
|
||||
|
||||
res.status(500).json({
|
||||
jsonrpc: "2.0",
|
||||
id: req.body?.id || null,
|
||||
error: {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: error instanceof Error ? error.message : "Internal error",
|
||||
data: this.debug && error instanceof Error ? { stack: error.stack } : undefined
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the HTTP server
|
||||
*/
|
||||
public async start(): Promise<void> {
|
||||
if (!this.initialized) {
|
||||
throw new Error("HttpTransport not initialized");
|
||||
}
|
||||
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
try {
|
||||
this.server = this.app.listen(this.port, () => {
|
||||
logger.info(`HTTP transport started on port ${this.port}`);
|
||||
resolve();
|
||||
});
|
||||
|
||||
// Error handler
|
||||
this.server.on('error', (err) => {
|
||||
logger.error(`HTTP server error: ${String(err)}`);
|
||||
reject(err);
|
||||
});
|
||||
} catch (err) {
|
||||
logger.error(`Failed to start HTTP transport: ${String(err)}`);
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the HTTP server
|
||||
*/
|
||||
public async stop(): Promise<void> {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
// Close server if running
|
||||
if (this.server) {
|
||||
this.server.close((err) => {
|
||||
if (err) {
|
||||
logger.error(`Error shutting down HTTP server: ${String(err)}`);
|
||||
reject(err);
|
||||
} else {
|
||||
logger.info("HTTP transport stopped");
|
||||
this.server = null;
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
} else {
|
||||
resolve();
|
||||
}
|
||||
|
||||
// Close all SSE connections
|
||||
for (const client of this.sseClients.values()) {
|
||||
try {
|
||||
client.response.write(`event: shutdown\ndata: {}\n\n`);
|
||||
client.response.end();
|
||||
} catch (err) {
|
||||
logger.error(`Error closing SSE connection: ${String(err)}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Clear all clients
|
||||
this.sseClients.clear();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an SSE event to a specific client
|
||||
*/
|
||||
private sendSSEEvent(clientId: string, event: string, data: unknown): boolean {
|
||||
const client = this.sseClients.get(clientId);
|
||||
if (!client) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const payload = JSON.stringify(data);
|
||||
client.response.write(`event: ${event}\ndata: ${payload}\n\n`);
|
||||
return true;
|
||||
} catch (err) {
|
||||
logger.error(`Error sending SSE event: ${String(err)}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a notification to a client
|
||||
*/
|
||||
public sendNotification(notification: MCPNotification): void {
|
||||
// SSE notifications not supported without a client ID
|
||||
return;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a streaming response part
|
||||
*/
|
||||
public sendStreamPart(streamPart: MCPStreamPart): void {
|
||||
// Find the client ID in streaming metadata
|
||||
const clientId = streamPart.clientId;
|
||||
if (!clientId || !this.sseClients.has(clientId)) {
|
||||
logger.warn(`Cannot send stream part: client ${clientId || 'unknown'} not connected`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Send the stream part as an SSE event
|
||||
const eventPayload = {
|
||||
jsonrpc: "2.0",
|
||||
id: streamPart.id,
|
||||
stream: {
|
||||
partId: streamPart.partId,
|
||||
final: streamPart.final,
|
||||
data: streamPart.data
|
||||
}
|
||||
};
|
||||
|
||||
this.sendSSEEvent(clientId, 'stream', eventPayload);
|
||||
|
||||
// Debug logging
|
||||
if (this.debug) {
|
||||
logger.debug(`Sent stream part to client ${clientId}: partId=${streamPart.partId}, final=${streamPart.final}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Broadcast a notification to all connected clients
|
||||
*/
|
||||
public broadcastNotification(event: string, data: unknown): void {
|
||||
for (const client of this.sseClients.values()) {
|
||||
try {
|
||||
const payload = JSON.stringify(data);
|
||||
client.response.write(`event: ${event}\ndata: ${payload}\n\n`);
|
||||
} catch (err) {
|
||||
logger.error(`Error broadcasting to client ${client.id}: ${String(err)}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a log message (not applicable for HTTP transport)
|
||||
*/
|
||||
public sendLogMessage(level: string, message: string, data?: unknown): void {
|
||||
// Log messages in HTTP context go to the logger, not to clients
|
||||
logger[level as keyof typeof logger]?.(message, data);
|
||||
}
|
||||
}
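A client-side sketch against the endpoints above, assuming the defaults (port 3000, apiPrefix '/api'); the tool name and parameters are placeholders:

```typescript
// Hypothetical client call; "light_control" is a placeholder tool/method name.
const res = await fetch("http://localhost:3000/api/jsonrpc", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    jsonrpc: "2.0",
    id: 1,
    method: "light_control",
    params: { action: "turn_on", entity_id: "light.kitchen" },
  }),
});

const reply = await res.json();
// Either { jsonrpc: "2.0", id: 1, result: ... } or { jsonrpc: "2.0", id: 1, error: { code, message } }
```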
329
src/mcp/transports/stdio.transport.ts
Normal file
@@ -0,0 +1,329 @@
|
||||
/**
|
||||
* Stdio Transport for MCP
|
||||
*
|
||||
* This module provides a transport that uses standard input/output
|
||||
* for JSON-RPC 2.0 communication. This is particularly useful for
|
||||
* integration with AI assistants like Claude, GPT, and Cursor.
|
||||
*/
|
||||
|
||||
import { BaseTransport } from "../transport.js";
|
||||
import { logger } from "../../utils/logger.js";
|
||||
import { MCPServer } from "../MCPServer.js";
|
||||
import type { MCPRequest, MCPResponse, ToolExecutionResult } from "../types.js";
|
||||
import { JSONRPCError } from "../utils/error.js";
|
||||
|
||||
/**
|
||||
* StdioTransport options
|
||||
*/
|
||||
export interface StdioTransportOptions {
|
||||
/** Whether to enable silent mode (suppress non-essential output) */
|
||||
silent?: boolean;
|
||||
/** Whether to enable debug mode */
|
||||
debug?: boolean;
|
||||
/** Reference to an MCPServer instance */
|
||||
server?: MCPServer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transport implementation for standard input/output
|
||||
* Communicates using JSON-RPC 2.0 protocol
|
||||
*/
|
||||
export class StdioTransport extends BaseTransport {
|
||||
private isStarted = false;
|
||||
private silent: boolean;
|
||||
private debug: boolean;
|
||||
private server: MCPServer | null = null;
|
||||
|
||||
constructor(options: StdioTransportOptions = {}) {
|
||||
super();
|
||||
this.silent = options.silent ?? false;
|
||||
this.debug = options.debug ?? false;
|
||||
|
||||
if (options.server) {
|
||||
this.server = options.server;
|
||||
}
|
||||
|
||||
// Read stdin as UTF-8 text
|
||||
process.stdin.setEncoding('utf8');
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the server reference to access tools and other server properties
|
||||
*/
|
||||
public setServer(server: MCPServer): void {
|
||||
this.server = server;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start the transport and setup stdin/stdout handlers
|
||||
*/
|
||||
public async start(): Promise<void> {
|
||||
if (this.isStarted) return;
|
||||
|
||||
if (!this.silent) {
|
||||
logger.info('Starting stdio transport');
|
||||
}
|
||||
|
||||
// Setup input handling
|
||||
this.setupInputHandling();
|
||||
|
||||
this.isStarted = true;
|
||||
|
||||
if (!this.silent) {
|
||||
logger.info('Stdio transport started');
|
||||
}
|
||||
|
||||
// Send system info notification
|
||||
this.sendSystemInfo();
|
||||
|
||||
// Send available tools notification
|
||||
this.sendAvailableTools();
|
||||
}
|
||||
|
||||
/**
|
||||
* Send system information as a notification
|
||||
* This helps clients understand the capabilities of the server
|
||||
*/
|
||||
private sendSystemInfo(): void {
|
||||
const notification = {
|
||||
jsonrpc: '2.0',
|
||||
method: 'system.info',
|
||||
params: {
|
||||
name: 'Home Assistant Model Context Protocol Server',
|
||||
version: '1.0.0',
|
||||
transport: 'stdio',
|
||||
protocol: 'json-rpc-2.0',
|
||||
features: ['streaming'],
|
||||
timestamp: new Date().toISOString()
|
||||
}
|
||||
};
|
||||
|
||||
// Send directly to stdout
|
||||
process.stdout.write(JSON.stringify(notification) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Send available tools as a notification
|
||||
* This helps clients know what tools are available to use
|
||||
*/
|
||||
private sendAvailableTools(): void {
|
||||
if (!this.server) {
|
||||
logger.warn('Cannot send available tools: server reference not set');
|
||||
return;
|
||||
}
|
||||
|
||||
const tools = this.server.getAllTools().map(tool => {
|
||||
// Parameters are advertised as a simplified empty JSON Schema in this notification
const parameters = { type: 'object', properties: {} };
|
||||
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters,
|
||||
metadata: tool.metadata
|
||||
};
|
||||
});
|
||||
|
||||
const notification = {
|
||||
jsonrpc: '2.0',
|
||||
method: 'tools.available',
|
||||
params: { tools }
|
||||
};
|
||||
|
||||
// Send directly to stdout
|
||||
process.stdout.write(JSON.stringify(notification) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Set up the input handling for JSON-RPC requests
|
||||
*/
|
||||
private setupInputHandling(): void {
|
||||
let buffer = '';
|
||||
|
||||
process.stdin.on('data', (chunk: string) => {
|
||||
buffer += chunk;
|
||||
|
||||
try {
|
||||
// Look for complete JSON objects by matching opening and closing braces
|
||||
let startIndex = 0;
|
||||
let openBraces = 0;
|
||||
let inString = false;
|
||||
let escapeNext = false;
|
||||
|
||||
for (let i = 0; i < buffer.length; i++) {
|
||||
const char = buffer[i];
|
||||
|
||||
if (escapeNext) {
|
||||
escapeNext = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (char === '\\' && inString) {
|
||||
escapeNext = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (char === '"' && !escapeNext) {
|
||||
inString = !inString;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!inString) {
|
||||
if (char === '{') {
|
||||
if (openBraces === 0) {
|
||||
startIndex = i;
|
||||
}
|
||||
openBraces++;
|
||||
} else if (char === '}') {
|
||||
openBraces--;
|
||||
|
||||
if (openBraces === 0) {
|
||||
// We have a complete JSON object
|
||||
const jsonStr = buffer.substring(startIndex, i + 1);
|
||||
this.handleJsonRequest(jsonStr);
|
||||
|
||||
// Remove the processed part from the buffer
|
||||
buffer = buffer.substring(i + 1);
|
||||
|
||||
// Reset the parser to start from the beginning of the new buffer
|
||||
i = -1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (this.debug) {
|
||||
logger.error('Error processing JSON-RPC input', error);
|
||||
}
|
||||
|
||||
this.sendErrorResponse(null, new JSONRPCError.ParseError('Invalid JSON'));
|
||||
}
|
||||
});
|
||||
|
||||
process.stdin.on('end', () => {
|
||||
if (!this.silent) {
|
||||
logger.info('Stdio transport: stdin ended');
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
process.stdin.on('error', (error) => {
|
||||
logger.error('Stdio transport: stdin error', error);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle a JSON-RPC request
|
||||
*/
|
||||
private async handleJsonRequest(jsonStr: string): Promise<void> {
|
||||
try {
|
||||
const request = JSON.parse(jsonStr);
|
||||
|
||||
if (this.debug) {
|
||||
logger.debug(`Received request: ${jsonStr}`);
|
||||
}
|
||||
|
||||
if (!request.jsonrpc || request.jsonrpc !== '2.0') {
|
||||
return this.sendErrorResponse(
|
||||
request.id,
|
||||
new JSONRPCError.InvalidRequest('Invalid JSON-RPC 2.0 request')
|
||||
);
|
||||
}
|
||||
|
||||
const mcpRequest: MCPRequest = {
|
||||
jsonrpc: request.jsonrpc,
|
||||
id: request.id,
|
||||
method: request.method,
|
||||
params: request.params || {}
|
||||
};
|
||||
|
||||
if (!this.server) {
|
||||
return this.sendErrorResponse(
|
||||
request.id,
|
||||
new JSONRPCError.InternalError('Server not available')
|
||||
);
|
||||
}
|
||||
|
||||
// Delegate to the server to handle the request
|
||||
if (this.handler) {
|
||||
const response = await this.handler(mcpRequest);
|
||||
this.sendResponse(response);
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
if (error instanceof SyntaxError) {
|
||||
this.sendErrorResponse(null, new JSONRPCError.ParseError('Invalid JSON'));
|
||||
} else {
|
||||
this.sendErrorResponse(null, new JSONRPCError.InternalError('Internal error'));
|
||||
}
|
||||
|
||||
if (this.debug) {
|
||||
logger.error('Error handling JSON-RPC request', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a JSON-RPC error response
|
||||
*/
|
||||
private sendErrorResponse(id: string | number | null, error: JSONRPCError.JSONRPCError): void {
|
||||
const response = {
|
||||
jsonrpc: '2.0',
|
||||
id: id,
|
||||
error: {
|
||||
code: error.code,
|
||||
message: error.message,
|
||||
data: error.data
|
||||
}
|
||||
};
|
||||
|
||||
process.stdout.write(JSON.stringify(response) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an MCPResponse to the client
|
||||
*/
|
||||
public sendResponse(response: MCPResponse): void {
|
||||
const jsonRpcResponse = {
|
||||
jsonrpc: '2.0',
|
||||
id: response.id,
|
||||
...(response.error
|
||||
? { error: response.error }
|
||||
: { result: response.result })
|
||||
};
|
||||
|
||||
process.stdout.write(JSON.stringify(jsonRpcResponse) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream a partial response for long-running operations
|
||||
*/
|
||||
public streamResponsePart(requestId: string | number, result: ToolExecutionResult): void {
|
||||
const streamResponse = {
|
||||
jsonrpc: '2.0',
|
||||
method: 'stream.data',
|
||||
params: {
|
||||
id: requestId,
|
||||
data: result
|
||||
}
|
||||
};
|
||||
|
||||
process.stdout.write(JSON.stringify(streamResponse) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop the transport
|
||||
*/
|
||||
public async stop(): Promise<void> {
|
||||
if (!this.isStarted) return;
|
||||
|
||||
if (!this.silent) {
|
||||
logger.info('Stopping stdio transport');
|
||||
}
|
||||
|
||||
this.isStarted = false;
|
||||
}
|
||||
}
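A sketch of driving the stdio transport from a parent process; the entry-point path and the method name are placeholders, not part of this change:

```typescript
import { spawn } from "node:child_process";

// Placeholder entry point; adjust to wherever createStdioServer is started.
const child = spawn("node", ["dist/stdio-server.js"], { stdio: ["pipe", "pipe", "inherit"] });

// One JSON-RPC object per line; the brace-matching parser above also accepts pretty-printed JSON.
child.stdin.write(JSON.stringify({
  jsonrpc: "2.0",
  id: 1,
  method: "some_tool",   // placeholder tool name
  params: {},
}) + "\n");

child.stdout.on("data", (chunk) => console.log("server:", chunk.toString()));
```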
220
src/mcp/types.ts
Normal file
@@ -0,0 +1,220 @@
|
||||
/**
|
||||
* MCP Type Definitions
|
||||
*
|
||||
* This file contains all the type definitions used by the Model Context Protocol
|
||||
* implementation, including tools, transports, middleware, and resources.
|
||||
*/
|
||||
|
||||
import { z } from "zod";
|
||||
import { Logger } from "winston";
|
||||
import { MCPServer, MCPErrorCode, MCPServerEvents } from "./MCPServer.js";
|
||||
|
||||
/**
|
||||
* MCP Server configuration
|
||||
*/
|
||||
export interface MCPConfig {
|
||||
maxRetries: number;
|
||||
retryDelay: number;
|
||||
executionTimeout: number;
|
||||
streamingEnabled: boolean;
|
||||
maxPayloadSize: number;
|
||||
}
|
||||
|
||||
// Re-export enums from MCPServer
|
||||
export { MCPErrorCode, MCPServerEvents };
|
||||
|
||||
/**
|
||||
* Tool definition interface
|
||||
*/
|
||||
export interface ToolDefinition {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters?: z.ZodType<any>;
|
||||
returnType?: z.ZodType<any>;
|
||||
execute: (params: any, context: MCPContext) => Promise<any>;
|
||||
metadata?: ToolMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool metadata for categorization and discovery
|
||||
*/
|
||||
export interface ToolMetadata {
|
||||
category: string;
|
||||
version: string;
|
||||
tags?: string[];
|
||||
platforms?: string[];
|
||||
requiresAuth?: boolean;
|
||||
isStreaming?: boolean;
|
||||
examples?: ToolExample[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Example usage for a tool
|
||||
*/
|
||||
export interface ToolExample {
|
||||
description: string;
|
||||
params: any;
|
||||
expectedResult?: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC Request
|
||||
*/
|
||||
export interface MCPRequest {
|
||||
jsonrpc: string;
|
||||
id: string | number | null;
|
||||
method: string;
|
||||
params?: Record<string, unknown>;
|
||||
streaming?: {
|
||||
enabled: boolean;
|
||||
clientId: string;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC 2.0 Response
|
||||
*/
|
||||
export interface MCPResponse {
|
||||
jsonrpc?: string;
|
||||
id?: string | number;
|
||||
result?: any;
|
||||
error?: MCPError;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC 2.0 Error
|
||||
*/
|
||||
export interface MCPError {
|
||||
code: number;
|
||||
message: string;
|
||||
data?: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC 2.0 Notification
|
||||
*/
|
||||
export interface MCPNotification {
|
||||
jsonrpc?: string;
|
||||
method: string;
|
||||
params?: any;
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC Stream Part
|
||||
*/
|
||||
export interface MCPStreamPart {
|
||||
id: string | number;
|
||||
partId: string | number;
|
||||
final: boolean;
|
||||
data: unknown;
|
||||
clientId?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Response Stream Interface for streaming operation results
|
||||
*/
|
||||
export interface MCPResponseStream {
|
||||
/**
|
||||
* Write partial result data to the stream
|
||||
*
|
||||
* @param data The partial result data
|
||||
* @returns True if the write was successful, false otherwise
|
||||
*/
|
||||
write(data: any): boolean;
|
||||
|
||||
/**
|
||||
* End the stream, indicating no more data will be sent
|
||||
*
|
||||
* @param data Optional final data to send
|
||||
*/
|
||||
end(data?: any): void;
|
||||
|
||||
/**
|
||||
* Check if streaming is enabled
|
||||
*/
|
||||
readonly isEnabled: boolean;
|
||||
|
||||
/**
|
||||
* Get the client ID for this stream
|
||||
*/
|
||||
readonly clientId?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Context for tool execution
|
||||
*/
|
||||
export interface MCPContext {
|
||||
requestId: string | number;
|
||||
startTime: number;
|
||||
resourceManager: ResourceManager;
|
||||
tools: Map<string, ToolDefinition>;
|
||||
config: MCPConfig;
|
||||
logger: Logger;
|
||||
server: MCPServer;
|
||||
state?: Map<string, any>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resource manager interface
|
||||
*/
|
||||
export interface ResourceManager {
|
||||
acquire: (resourceType: string, resourceId: string, context: MCPContext) => Promise<any>;
|
||||
release: (resourceType: string, resourceId: string, context: MCPContext) => Promise<void>;
|
||||
list: (context: MCPContext, resourceType?: string) => Promise<string[]>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Middleware function type
|
||||
*/
|
||||
export type MCPMiddleware = (
|
||||
request: MCPRequest,
|
||||
context: MCPContext,
|
||||
next: () => Promise<MCPResponse>
|
||||
) => Promise<MCPResponse>;
|
||||
|
||||
/**
|
||||
* Transport layer interface
|
||||
*/
|
||||
export interface TransportLayer {
|
||||
name: string;
|
||||
initialize: (handler: (request: MCPRequest) => Promise<MCPResponse>) => void;
|
||||
start: () => Promise<void>;
|
||||
stop: () => Promise<void>;
|
||||
sendNotification?: (notification: MCPNotification) => void;
|
||||
sendStreamPart?: (streamPart: MCPStreamPart) => void;
|
||||
}
|
||||
|
||||
/**
|
||||
* Claude-specific function call formats
|
||||
*/
|
||||
export interface ClaudeFunctionDefinition {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters: {
|
||||
type: string;
|
||||
properties: Record<string, {
|
||||
type: string;
|
||||
description: string;
|
||||
enum?: string[];
|
||||
}>;
|
||||
required: string[];
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Cursor-specific integration types
|
||||
*/
|
||||
export interface CursorToolDefinition {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters: Record<string, {
|
||||
type: string;
|
||||
description: string;
|
||||
required: boolean;
|
||||
}>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool execution result type used in streaming responses
|
||||
*/
|
||||
export type ToolExecutionResult = any;
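A sketch of a ToolDefinition built against these types; the tool name, entity fields, and behaviour are invented for illustration:

```typescript
import { z } from "zod";
import type { ToolDefinition, MCPContext } from "./types.js";

// Illustrative tool only; entity_id/state are placeholders, not part of this change.
const setStateTool: ToolDefinition = {
  name: "set_state",
  description: "Set the state of a Home Assistant entity",
  parameters: z.object({
    entity_id: z.string().describe("Entity to update"),
    state: z.string().describe("New state value"),
  }),
  metadata: { category: "home-assistant", version: "1.0.0", requiresAuth: true },
  execute: async (params: { entity_id: string; state: string }, context: MCPContext) => {
    context.logger.info(`Setting ${params.entity_id} -> ${params.state}`);
    return { success: true };
  },
};
```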
129
src/mcp/utils/claude.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
/**
|
||||
* Claude Integration Utilities
|
||||
*
|
||||
* This file contains utilities for integrating with Claude AI models.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { ToolDefinition } from '../types.js';
|
||||
|
||||
/**
|
||||
* Convert a Zod schema to a JSON Schema for Claude
|
||||
*/
|
||||
export function zodToJsonSchema(schema: z.ZodType<any>): any {
|
||||
if (!schema) return { type: 'object', properties: {} };
|
||||
|
||||
// Handle ZodObject
|
||||
if (schema instanceof z.ZodObject) {
|
||||
const shape = (schema as any)._def.shape();
|
||||
const properties: Record<string, any> = {};
|
||||
const required: string[] = [];
|
||||
|
||||
for (const [key, value] of Object.entries(shape)) {
|
||||
if (!(value instanceof z.ZodOptional)) {
|
||||
required.push(key);
|
||||
}
|
||||
|
||||
properties[key] = zodTypeToJsonSchema(value as z.ZodType<any>);
|
||||
}
|
||||
|
||||
return {
|
||||
type: 'object',
|
||||
properties,
|
||||
required: required.length > 0 ? required : undefined
|
||||
};
|
||||
}
|
||||
|
||||
// Handle other schema types
|
||||
return { type: 'object', properties: {} };
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a Zod type to JSON Schema type
|
||||
*/
|
||||
export function zodTypeToJsonSchema(zodType: z.ZodType<any>): any {
|
||||
if (zodType instanceof z.ZodString) {
|
||||
return { type: 'string' };
|
||||
} else if (zodType instanceof z.ZodNumber) {
|
||||
return { type: 'number' };
|
||||
} else if (zodType instanceof z.ZodBoolean) {
|
||||
return { type: 'boolean' };
|
||||
} else if (zodType instanceof z.ZodArray) {
|
||||
return {
|
||||
type: 'array',
|
||||
items: zodTypeToJsonSchema((zodType as any)._def.type)
|
||||
};
|
||||
} else if (zodType instanceof z.ZodEnum) {
|
||||
return {
|
||||
type: 'string',
|
||||
enum: (zodType as any)._def.values
|
||||
};
|
||||
} else if (zodType instanceof z.ZodOptional) {
|
||||
return zodTypeToJsonSchema((zodType as any)._def.innerType);
|
||||
} else if (zodType instanceof z.ZodObject) {
|
||||
return zodToJsonSchema(zodType);
|
||||
}
|
||||
|
||||
return { type: 'object' };
|
||||
}
|
||||
|
||||
/**
|
||||
* Create Claude-compatible tool definitions from MCP tools
|
||||
*
|
||||
* @param tools Array of MCP tool definitions
|
||||
* @returns Array of Claude-compatible tool definitions
|
||||
*/
|
||||
export function createClaudeToolDefinitions(tools: ToolDefinition[]): any[] {
|
||||
return tools.map(tool => {
|
||||
const parameters = tool.parameters
|
||||
? zodToJsonSchema(tool.parameters)
|
||||
: { type: 'object', properties: {} };
|
||||
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Format an MCP tool execution request for Claude
|
||||
*/
|
||||
export function formatToolExecutionRequest(toolName: string, params: Record<string, unknown>): any {
|
||||
return {
|
||||
type: 'tool_use',
|
||||
name: toolName,
|
||||
parameters: params
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a Claude tool execution response
|
||||
*/
|
||||
export function parseToolExecutionResponse(response: any): {
|
||||
success: boolean;
|
||||
result?: any;
|
||||
error?: string;
|
||||
} {
|
||||
if (!response || typeof response !== 'object') {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid tool execution response'
|
||||
};
|
||||
}
|
||||
|
||||
if ('error' in response) {
|
||||
return {
|
||||
success: false,
|
||||
error: typeof response.error === 'string'
|
||||
? response.error
|
||||
: JSON.stringify(response.error)
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
result: response
|
||||
};
|
||||
}
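A sketch of what createClaudeToolDefinitions produces for a small Zod schema; the tool itself and the import path are assumptions for illustration:

```typescript
import { z } from "zod";
import { createClaudeToolDefinitions } from "./utils/claude.js";

// Hypothetical tool used only to show the Zod -> JSON Schema conversion.
const [weatherTool] = createClaudeToolDefinitions([{
  name: "get_weather",
  description: "Get the weather for a location",
  parameters: z.object({
    location: z.string().describe("City name"),
    units: z.enum(["metric", "imperial"]).optional(),
  }),
  execute: async () => ({}),
}]);

// weatherTool.parameters is now JSON Schema:
// { type: "object",
//   properties: { location: { type: "string" }, units: { type: "string", enum: ["metric", "imperial"] } },
//   required: ["location"] }
```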
131
src/mcp/utils/cursor.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
/**
|
||||
* Cursor Integration Utilities
|
||||
*
|
||||
* This file contains utilities for integrating with Cursor IDE.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { ToolDefinition } from '../types.js';
|
||||
|
||||
/**
|
||||
* Create Cursor-compatible tool definitions from MCP tools
|
||||
*
|
||||
* @param tools Array of MCP tool definitions
|
||||
* @returns Array of Cursor-compatible tool definitions
|
||||
*/
|
||||
export function createCursorToolDefinitions(tools: ToolDefinition[]): any[] {
|
||||
return tools.map(tool => {
|
||||
// Convert parameters to Cursor format
|
||||
const parameters = tool.parameters
|
||||
? extractParametersFromZod(tool.parameters)
|
||||
: {};
|
||||
|
||||
return {
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract parameters from a Zod schema for Cursor integration
|
||||
*/
|
||||
function extractParametersFromZod(schema: z.ZodType<any>): Record<string, any> {
|
||||
if (!(schema instanceof z.ZodObject)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const shape = (schema as any)._def.shape();
|
||||
const params: Record<string, any> = {};
|
||||
|
||||
for (const [key, value] of Object.entries(shape)) {
|
||||
const isRequired = !(value instanceof z.ZodOptional);
|
||||
|
||||
let type = 'string';
|
||||
let description = '';
|
||||
|
||||
// Get description if available
|
||||
try {
|
||||
description = value._def.description || '';
|
||||
} catch (e) {
|
||||
// Ignore if description is not available
|
||||
}
|
||||
|
||||
// Determine the type
|
||||
if (value instanceof z.ZodString) {
|
||||
type = 'string';
|
||||
} else if (value instanceof z.ZodNumber) {
|
||||
type = 'number';
|
||||
} else if (value instanceof z.ZodBoolean) {
|
||||
type = 'boolean';
|
||||
} else if (value instanceof z.ZodArray) {
|
||||
type = 'array';
|
||||
} else if (value instanceof z.ZodEnum) {
|
||||
type = 'string';
|
||||
} else if (value instanceof z.ZodObject) {
|
||||
type = 'object';
|
||||
} else if (value instanceof z.ZodOptional) {
|
||||
// Get the inner type
|
||||
const innerValue = value._def.innerType;
|
||||
if (innerValue instanceof z.ZodString) {
|
||||
type = 'string';
|
||||
} else if (innerValue instanceof z.ZodNumber) {
|
||||
type = 'number';
|
||||
} else if (innerValue instanceof z.ZodBoolean) {
|
||||
type = 'boolean';
|
||||
} else if (innerValue instanceof z.ZodArray) {
|
||||
type = 'array';
|
||||
} else {
|
||||
type = 'object';
|
||||
}
|
||||
}
|
||||
|
||||
params[key] = {
|
||||
type,
|
||||
description,
|
||||
required: isRequired
|
||||
};
|
||||
}
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
/**
|
||||
* Format a tool response for Cursor
|
||||
*/
|
||||
export function formatCursorResponse(response: any): any {
|
||||
// For now, just return the response as-is
|
||||
// Cursor expects a specific format, which may need to be customized
|
||||
return response;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse a Cursor tool execution request
|
||||
*/
|
||||
export function parseCursorRequest(request: any): {
|
||||
success: boolean;
|
||||
toolName?: string;
|
||||
params?: Record<string, any>;
|
||||
error?: string;
|
||||
} {
|
||||
if (!request || typeof request !== 'object') {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Invalid request format'
|
||||
};
|
||||
}
|
||||
|
||||
if (!request.name || typeof request.name !== 'string') {
|
||||
return {
|
||||
success: false,
|
||||
error: 'Missing or invalid tool name'
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
toolName: request.name,
|
||||
params: request.parameters || {}
|
||||
};
|
||||
}
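A sketch of parseCursorRequest on a well-formed Cursor request; the payload is invented:

```typescript
import { parseCursorRequest } from "./utils/cursor.js";

// Illustrative payload only.
const parsed = parseCursorRequest({
  name: "set_state",
  parameters: { entity_id: "light.kitchen", state: "on" },
});

if (parsed.success) {
  console.log(parsed.toolName, parsed.params);  // "set_state", { entity_id: "light.kitchen", state: "on" }
} else {
  console.error(parsed.error);
}
```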
194
src/mcp/utils/error.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
/**
|
||||
* Error Handling Utilities
|
||||
*
|
||||
* This file contains utilities for handling errors in the MCP implementation.
|
||||
*/
|
||||
|
||||
import { MCPErrorCode, MCPError } from '../types.js';
|
||||
|
||||
/**
|
||||
* Create an MCP error object
|
||||
*/
|
||||
export function createError(
|
||||
code: MCPErrorCode,
|
||||
message: string,
|
||||
data?: unknown
|
||||
): MCPError {
|
||||
return {
|
||||
code,
|
||||
message,
|
||||
data
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Format an error for JSON-RPC response
|
||||
*/
|
||||
export function formatJsonRpcError(
|
||||
id: string | number | null,
|
||||
code: MCPErrorCode,
|
||||
message: string,
|
||||
data?: unknown
|
||||
): any {
|
||||
return {
|
||||
jsonrpc: '2.0',
|
||||
id,
|
||||
error: {
|
||||
code,
|
||||
message,
|
||||
data
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle unexpected errors and convert to MCPError
|
||||
*/
|
||||
export function handleUnexpectedError(error: unknown): MCPError {
|
||||
if (error instanceof Error) {
|
||||
return {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: error.message,
|
||||
data: {
|
||||
name: error.name,
|
||||
stack: error.stack
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
code: MCPErrorCode.INTERNAL_ERROR,
|
||||
message: 'An unexpected error occurred',
|
||||
data: error
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Safe JSON stringify with circular reference handling
|
||||
*/
|
||||
export function safeStringify(obj: unknown): string {
|
||||
const seen = new WeakSet();
|
||||
return JSON.stringify(obj, (key, value) => {
|
||||
if (typeof value === 'object' && value !== null) {
|
||||
if (seen.has(value)) {
|
||||
return '[Circular]';
|
||||
}
|
||||
seen.add(value);
|
||||
}
|
||||
return value;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* JSON-RPC error related utilities and classes
|
||||
*/
|
||||
export namespace JSONRPCError {
|
||||
/**
|
||||
* Standard JSON-RPC 2.0 error codes
|
||||
*/
|
||||
export enum ErrorCode {
|
||||
PARSE_ERROR = -32700,
|
||||
INVALID_REQUEST = -32600,
|
||||
METHOD_NOT_FOUND = -32601,
|
||||
INVALID_PARAMS = -32602,
|
||||
INTERNAL_ERROR = -32603,
|
||||
// Implementation specific error codes
|
||||
SERVER_ERROR_START = -32099,
|
||||
SERVER_ERROR_END = -32000,
|
||||
// MCP specific error codes
|
||||
TOOL_EXECUTION_ERROR = -32000,
|
||||
VALIDATION_ERROR = -32001,
|
||||
}
|
||||
|
||||
/**
|
||||
* Base JSON-RPC Error class
|
||||
*/
|
||||
export class JSONRPCError extends Error {
|
||||
public code: number;
|
||||
public data?: unknown;
|
||||
|
||||
constructor(message: string, code: number, data?: unknown) {
|
||||
super(message);
|
||||
this.name = 'JSONRPCError';
|
||||
this.code = code;
|
||||
this.data = data;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse Error (-32700)
|
||||
* Invalid JSON was received by the server.
|
||||
*/
|
||||
export class ParseError extends JSONRPCError {
|
||||
constructor(message: string = 'Parse error', data?: unknown) {
|
||||
super(message, ErrorCode.PARSE_ERROR, data);
|
||||
this.name = 'ParseError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalid Request (-32600)
|
||||
* The JSON sent is not a valid Request object.
|
||||
*/
|
||||
export class InvalidRequest extends JSONRPCError {
|
||||
constructor(message: string = 'Invalid request', data?: unknown) {
|
||||
super(message, ErrorCode.INVALID_REQUEST, data);
|
||||
this.name = 'InvalidRequest';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method Not Found (-32601)
|
||||
* The method does not exist / is not available.
|
||||
*/
|
||||
export class MethodNotFound extends JSONRPCError {
|
||||
constructor(message: string = 'Method not found', data?: unknown) {
|
||||
super(message, ErrorCode.METHOD_NOT_FOUND, data);
|
||||
this.name = 'MethodNotFound';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalid Params (-32602)
|
||||
* Invalid method parameter(s).
|
||||
*/
|
||||
export class InvalidParams extends JSONRPCError {
|
||||
constructor(message: string = 'Invalid params', data?: unknown) {
|
||||
super(message, ErrorCode.INVALID_PARAMS, data);
|
||||
this.name = 'InvalidParams';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Internal Error (-32603)
|
||||
* Internal JSON-RPC error.
|
||||
*/
|
||||
export class InternalError extends JSONRPCError {
|
||||
constructor(message: string = 'Internal error', data?: unknown) {
|
||||
super(message, ErrorCode.INTERNAL_ERROR, data);
|
||||
this.name = 'InternalError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Tool Execution Error (-32000)
|
||||
* Error during tool execution.
|
||||
*/
|
||||
export class ToolExecutionError extends JSONRPCError {
|
||||
constructor(message: string = 'Tool execution error', data?: unknown) {
|
||||
super(message, ErrorCode.TOOL_EXECUTION_ERROR, data);
|
||||
this.name = 'ToolExecutionError';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validation Error (-32001)
|
||||
* Error during validation of params or result.
|
||||
*/
|
||||
export class ValidationError extends JSONRPCError {
|
||||
constructor(message: string = 'Validation error', data?: unknown) {
|
||||
super(message, ErrorCode.VALIDATION_ERROR, data);
|
||||
this.name = 'ValidationError';
|
||||
}
|
||||
}
|
||||
}
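A sketch of formatJsonRpcError producing the wire-level error shape used by the transports; the detail payload is illustrative:

```typescript
import { formatJsonRpcError } from "./utils/error.js";
import { MCPErrorCode } from "./types.js";

// Illustrative data payload; only the code/message/data structure comes from this file.
const payload = formatJsonRpcError(42, MCPErrorCode.INVALID_PARAMS, "Invalid parameters", {
  issue: "entity_id is required",
});

// => { jsonrpc: "2.0", id: 42, error: { code: <INVALID_PARAMS>, message: "Invalid parameters", data: { issue: ... } } }
process.stdout.write(JSON.stringify(payload) + "\n");
```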
26
src/middleware/rate-limit.middleware.ts
Normal file
@@ -0,0 +1,26 @@
import rateLimit from 'express-rate-limit';
import { APP_CONFIG } from '../config.js';

// Create a limiter for API endpoints
export const apiLimiter = rateLimit({
    windowMs: 15 * 60 * 1000, // 15 minutes
    max: APP_CONFIG.rateLimit?.maxRequests || 100, // Limit each IP to 100 requests per windowMs
    message: {
        status: 'error',
        message: 'Too many requests from this IP, please try again later.'
    },
    standardHeaders: true, // Return rate limit info in the `RateLimit-*` headers
    legacyHeaders: false, // Disable the `X-RateLimit-*` headers
});

// Create a stricter limiter for authentication endpoints
export const authLimiter = rateLimit({
    windowMs: 60 * 60 * 1000, // 1 hour
    max: APP_CONFIG.rateLimit?.maxAuthRequests || 5, // Limit each IP to 5 login requests per hour
    message: {
        status: 'error',
        message: 'Too many login attempts from this IP, please try again later.'
    },
    standardHeaders: true,
    legacyHeaders: false,
});
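A sketch of mounting these limiters in an Express app; the route prefixes are assumptions, not part of the diff:

```typescript
import express from "express";
import { apiLimiter, authLimiter } from "./middleware/rate-limit.middleware.js";

const app = express();

// Route prefixes are illustrative; use whatever the server actually mounts.
app.use("/api", apiLimiter);          // general API traffic: 100 requests / 15 min per IP
app.use("/auth/login", authLimiter);  // login attempts: 5 requests / hour per IP
app.listen(3000);
```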
284
src/openapi.ts
Normal file
@@ -0,0 +1,284 @@
|
||||
import type { OpenAPIV3 } from 'openapi-types'
|
||||
|
||||
export const openApiConfig: OpenAPIV3.Document = {
|
||||
openapi: '3.0.0',
|
||||
info: {
|
||||
title: 'Home Assistant MCP API',
|
||||
description: `
|
||||
# Home Assistant Model Context Protocol API
|
||||
|
||||
The Model Context Protocol (MCP) provides a standardized interface for AI tools to interact with Home Assistant.
|
||||
This API documentation covers all available endpoints and features of the MCP server.
|
||||
|
||||
## Features
|
||||
- Tool Management
|
||||
- Real-time Communication
|
||||
- Health Monitoring
|
||||
- Rate Limiting
|
||||
- Authentication
|
||||
- Server-Sent Events (SSE)
|
||||
`,
|
||||
version: '1.0.0',
|
||||
contact: {
|
||||
name: 'Home Assistant MCP',
|
||||
url: 'https://github.com/your-repo/homeassistant-mcp'
|
||||
},
|
||||
license: {
|
||||
name: 'MIT',
|
||||
url: 'https://opensource.org/licenses/MIT'
|
||||
}
|
||||
},
|
||||
servers: [
|
||||
{
|
||||
url: 'http://localhost:3000',
|
||||
description: 'Local development server'
|
||||
}
|
||||
],
|
||||
paths: {
|
||||
'/health': {
|
||||
get: {
|
||||
tags: ['Health'],
|
||||
summary: 'Health check endpoint',
|
||||
description: 'Returns the current health status and version of the server',
|
||||
responses: {
|
||||
'200': {
|
||||
description: 'Server is healthy',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/HealthCheck'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'/api/tools': {
|
||||
get: {
|
||||
tags: ['Tools'],
|
||||
summary: 'List available tools',
|
||||
description: 'Returns a list of all registered tools and their capabilities',
|
||||
security: [{ bearerAuth: [] }],
|
||||
responses: {
|
||||
'200': {
|
||||
description: 'List of available tools',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
type: 'array',
|
||||
items: {
|
||||
$ref: '#/components/schemas/Tool'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'401': {
|
||||
description: 'Unauthorized',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/Error'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'/api/mcp/execute': {
|
||||
post: {
|
||||
tags: ['MCP'],
|
||||
summary: 'Execute a tool command',
|
||||
description: 'Executes a command using a registered tool',
|
||||
security: [{ bearerAuth: [] }],
|
||||
requestBody: {
|
||||
required: true,
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/ExecuteRequest'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
responses: {
|
||||
'200': {
|
||||
description: 'Command executed successfully',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/ExecuteResponse'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'400': {
|
||||
description: 'Invalid request',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/Error'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'401': {
|
||||
description: 'Unauthorized',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/Error'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'/api/mcp/stream': {
|
||||
get: {
|
||||
tags: ['SSE'],
|
||||
summary: 'Stream events',
|
||||
description: 'Opens a Server-Sent Events connection for real-time updates',
|
||||
security: [{ bearerAuth: [] }],
|
||||
responses: {
|
||||
'200': {
|
||||
description: 'SSE stream established',
|
||||
content: {
|
||||
'text/event-stream': {
|
||||
schema: {
|
||||
type: 'string'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
'401': {
|
||||
description: 'Unauthorized',
|
||||
content: {
|
||||
'application/json': {
|
||||
schema: {
|
||||
$ref: '#/components/schemas/Error'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
components: {
|
||||
schemas: {
|
||||
Error: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
code: {
|
||||
type: 'string',
|
||||
description: 'Error code'
|
||||
},
|
||||
message: {
|
||||
type: 'string',
|
||||
description: 'Error message'
|
||||
}
|
||||
},
|
||||
required: ['code', 'message']
|
||||
},
|
||||
HealthCheck: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
status: {
|
||||
type: 'string',
|
||||
enum: ['ok', 'error'],
|
||||
description: 'Current health status'
|
||||
},
|
||||
version: {
|
||||
type: 'string',
|
||||
description: 'Server version'
|
||||
}
|
||||
},
|
||||
required: ['status', 'version']
|
||||
},
|
||||
Tool: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: {
|
||||
type: 'string',
|
||||
description: 'Tool name'
|
||||
},
|
||||
description: {
|
||||
type: 'string',
|
||||
description: 'Tool description'
|
||||
},
|
||||
parameters: {
|
||||
type: 'object',
|
||||
description: 'Tool parameters schema'
|
||||
},
|
||||
returns: {
|
||||
type: 'object',
|
||||
description: 'Tool return value schema'
|
||||
}
|
||||
},
|
||||
required: ['name', 'description']
|
||||
},
|
||||
ExecuteRequest: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
tool: {
|
||||
type: 'string',
|
||||
description: 'Name of the tool to execute'
|
||||
},
|
||||
params: {
|
||||
type: 'object',
|
||||
description: 'Tool parameters'
|
||||
}
|
||||
},
|
||||
required: ['tool']
|
||||
},
|
||||
ExecuteResponse: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
result: {
|
||||
type: 'object',
|
||||
description: 'Tool execution result'
|
||||
},
|
||||
error: {
|
||||
type: 'string',
|
||||
description: 'Error message if execution failed'
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
securitySchemes: {
|
||||
bearerAuth: {
|
||||
type: 'http',
|
||||
scheme: 'bearer',
|
||||
bearerFormat: 'JWT',
|
||||
description: 'JWT token for authentication'
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: [
|
||||
{
|
||||
name: 'Health',
|
||||
description: 'Health check endpoints for monitoring server status'
|
||||
},
|
||||
{
|
||||
name: 'MCP',
|
||||
description: 'Model Context Protocol endpoints for tool execution'
|
||||
},
|
||||
{
|
||||
name: 'Tools',
|
||||
description: 'Tool management endpoints for listing and configuring tools'
|
||||
},
|
||||
{
|
||||
name: 'SSE',
|
||||
description: 'Server-Sent Events endpoints for real-time updates'
|
||||
}
|
||||
],
|
||||
security: [
|
||||
{
|
||||
bearerAuth: []
|
||||
}
|
||||
]
|
||||
}
|
||||
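A sketch of how openApiConfig could be exposed, assuming swagger-ui-express were added as a dependency (it is not shown in this diff); the /docs path is illustrative:

import express from 'express';
import swaggerUi from 'swagger-ui-express';
import { openApiConfig } from './openapi.js';

const app = express();

// Serve the raw document plus an interactive UI rendered from it.
app.get('/openapi.json', (_req, res) => res.json(openApiConfig));
app.use('/docs', swaggerUi.serve, swaggerUi.setup(openApiConfig));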
79  src/schemas/config.schema.ts  (new file)
@@ -0,0 +1,79 @@
import { z } from 'zod';

export const RateLimitSchema = z.object({
  maxRequests: z.number().int().min(1).default(100),
  maxAuthRequests: z.number().int().min(1).default(5),
});

export const MCPServerConfigSchema = z.object({
  // Server configuration
  port: z.number().int().min(1).max(65535).default(3000),
  environment: z.enum(['development', 'test', 'production']).default('development'),

  // Execution settings
  executionTimeout: z.number().int().min(1000).max(300000).default(30000),
  streamingEnabled: z.boolean().default(false),

  // Transport settings
  useStdioTransport: z.boolean().default(false),
  useHttpTransport: z.boolean().default(true),

  // Debug and logging
  debugMode: z.boolean().default(false),
  debugStdio: z.boolean().default(false),
  debugHttp: z.boolean().default(false),
  silentStartup: z.boolean().default(false),

  // CORS settings
  corsOrigin: z.string().default('*'),

  // Rate limiting
  rateLimit: RateLimitSchema.default({
    maxRequests: 100,
    maxAuthRequests: 5,
  }),

  // Speech features
  speech: z.object({
    enabled: z.boolean().default(false),
    wakeWord: z.object({
      enabled: z.boolean().default(false),
      threshold: z.number().min(0).max(1).default(0.05),
    }),
    asr: z.object({
      enabled: z.boolean().default(false),
      model: z.enum(['base', 'small', 'medium', 'large']).default('base'),
      engine: z.enum(['faster_whisper', 'whisper']).default('faster_whisper'),
      beamSize: z.number().int().min(1).max(10).default(5),
      computeType: z.enum(['float32', 'float16', 'int8']).default('float32'),
      language: z.string().default('en'),
    }),
    audio: z.object({
      minSpeechDuration: z.number().min(0.1).max(10).default(1.0),
      silenceDuration: z.number().min(0.1).max(5).default(0.5),
      sampleRate: z.number().int().min(8000).max(48000).default(16000),
      channels: z.number().int().min(1).max(2).default(1),
      chunkSize: z.number().int().min(256).max(4096).default(1024),
    }),
  }).default({
    enabled: false,
    wakeWord: { enabled: false, threshold: 0.05 },
    asr: {
      enabled: false,
      model: 'base',
      engine: 'faster_whisper',
      beamSize: 5,
      computeType: 'float32',
      language: 'en',
    },
    audio: {
      minSpeechDuration: 1.0,
      silenceDuration: 0.5,
      sampleRate: 16000,
      channels: 1,
      chunkSize: 1024,
    },
  }),
});

export type MCPServerConfigType = z.infer<typeof MCPServerConfigSchema>;
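A short sketch of applying this schema to environment-derived values (the variable names below are illustrative, not taken from this commit):

import { MCPServerConfigSchema } from './schemas/config.schema.js';

const parsed = MCPServerConfigSchema.safeParse({
  port: Number(process.env.PORT ?? 7123),
  useStdioTransport: process.env.USE_STDIO_TRANSPORT === 'true',
});

if (!parsed.success) {
  console.error(parsed.error.format());
  process.exit(1);
}

// Every field not supplied above falls back to its schema default.
const config = parsed.data;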
135  src/security/__tests__/enhanced-middleware.test.ts  (new file)
@@ -0,0 +1,135 @@
|
||||
import { expect, test, describe, beforeEach, afterEach } from 'bun:test';
|
||||
import { SecurityMiddleware } from '../enhanced-middleware';
|
||||
|
||||
describe('Enhanced Security Middleware', () => {
|
||||
describe('Security Headers', () => {
|
||||
test('applies security headers correctly', () => {
|
||||
const request = new Request('http://localhost');
|
||||
SecurityMiddleware.applySecurityHeaders(request);
|
||||
|
||||
expect(request.headers.get('content-security-policy')).toBeDefined();
|
||||
expect(request.headers.get('x-frame-options')).toBe('DENY');
|
||||
expect(request.headers.get('strict-transport-security')).toBeDefined();
|
||||
expect(request.headers.get('x-xss-protection')).toBe('1; mode=block');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Request Validation', () => {
|
||||
test('validates request size', async () => {
|
||||
const largeBody = 'x'.repeat(2 * 1024 * 1024); // 2MB
|
||||
const request = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
'content-length': largeBody.length.toString()
|
||||
},
|
||||
body: JSON.stringify({ data: largeBody })
|
||||
});
|
||||
|
||||
await expect(SecurityMiddleware.validateRequest(request)).rejects.toThrow('Request body too large');
|
||||
});
|
||||
|
||||
test('validates URL length', async () => {
|
||||
const longUrl = 'http://localhost/' + 'x'.repeat(3000);
|
||||
const request = new Request(longUrl);
|
||||
|
||||
await expect(SecurityMiddleware.validateRequest(request)).rejects.toThrow('URL too long');
|
||||
});
|
||||
|
||||
test('validates and sanitizes POST request body', async () => {
|
||||
const request = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
name: '<script>alert("xss")</script>Hello',
|
||||
age: 25
|
||||
})
|
||||
});
|
||||
|
||||
await SecurityMiddleware.validateRequest(request);
|
||||
const body = await request.json();
|
||||
expect(body.name).not.toContain('<script>');
|
||||
expect(body.age).toBe(25);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input Sanitization', () => {
|
||||
test('sanitizes string input', () => {
|
||||
const input = '<script>alert("xss")</script>Hello<img src="x" onerror="alert(1)">';
|
||||
const sanitized = SecurityMiddleware.sanitizeInput(input);
|
||||
expect(sanitized).toBe('Hello');
|
||||
});
|
||||
|
||||
test('sanitizes nested object input', () => {
|
||||
const input = {
|
||||
name: '<script>alert("xss")</script>John',
|
||||
details: {
|
||||
bio: '<img src="x" onerror="alert(1)">Web Developer'
|
||||
}
|
||||
};
|
||||
const sanitized = SecurityMiddleware.sanitizeInput(input) as any;
|
||||
expect(sanitized.name).toBe('John');
|
||||
expect(sanitized.details.bio).toBe('Web Developer');
|
||||
});
|
||||
|
||||
test('sanitizes array input', () => {
|
||||
const input = [
|
||||
'<script>alert(1)</script>Hello',
|
||||
'<img src="x" onerror="alert(1)">World'
|
||||
];
|
||||
const sanitized = SecurityMiddleware.sanitizeInput(input) as string[];
|
||||
expect(sanitized[0]).toBe('Hello');
|
||||
expect(sanitized[1]).toBe('World');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiting', () => {
|
||||
beforeEach(() => {
|
||||
// Reset rate limit stores before each test
|
||||
(SecurityMiddleware as any).rateLimitStore.clear();
|
||||
(SecurityMiddleware as any).authLimitStore.clear();
|
||||
});
|
||||
|
||||
test('enforces regular rate limits', () => {
|
||||
const ip = '127.0.0.1';
|
||||
|
||||
// Should allow up to 100 requests
|
||||
for (let i = 0; i < 100; i++) {
|
||||
expect(() => SecurityMiddleware.checkRateLimit(ip, false)).not.toThrow();
|
||||
}
|
||||
|
||||
// Should block the 101st request
|
||||
expect(() => SecurityMiddleware.checkRateLimit(ip, false)).toThrow('Too many requests');
|
||||
});
|
||||
|
||||
test('enforces stricter auth rate limits', () => {
|
||||
const ip = '127.0.0.1';
|
||||
|
||||
// Should allow up to 5 auth requests
|
||||
for (let i = 0; i < 5; i++) {
|
||||
expect(() => SecurityMiddleware.checkRateLimit(ip, true)).not.toThrow();
|
||||
}
|
||||
|
||||
// Should block the 6th auth request
|
||||
expect(() => SecurityMiddleware.checkRateLimit(ip, true)).toThrow('Too many authentication requests');
|
||||
});
|
||||
|
||||
test('resets rate limits after window expires', async () => {
|
||||
const ip = '127.0.0.1';
|
||||
|
||||
// Make max requests
|
||||
for (let i = 0; i < 100; i++) {
|
||||
SecurityMiddleware.checkRateLimit(ip, false);
|
||||
}
|
||||
|
||||
// Wait for rate limit window to expire
|
||||
const store = (SecurityMiddleware as any).rateLimitStore.get(ip);
|
||||
store.resetTime = Date.now() - 1000; // Set reset time to the past
|
||||
|
||||
// Should allow requests again
|
||||
expect(() => SecurityMiddleware.checkRateLimit(ip, false)).not.toThrow();
|
||||
});
|
||||
});
|
||||
});
|
||||
181  src/security/enhanced-middleware.ts  (new file)
@@ -0,0 +1,181 @@
|
||||
import express, { Request, Response, NextFunction, Router } from 'express';
|
||||
import sanitizeHtml from 'sanitize-html';
|
||||
|
||||
// Custom error type with status code
|
||||
class SecurityError extends Error {
|
||||
constructor(public message: string, public statusCode: number) {
|
||||
super(message);
|
||||
this.name = 'SecurityError';
|
||||
}
|
||||
}
|
||||
|
||||
// Security configuration
|
||||
const SECURITY_CONFIG = {
|
||||
FRAME_OPTIONS: 'DENY',
|
||||
XSS_PROTECTION: '1; mode=block',
|
||||
REFERRER_POLICY: 'strict-origin-when-cross-origin',
|
||||
HSTS_MAX_AGE: 31536000, // 1 year in seconds
|
||||
CSP: {
|
||||
'default-src': ["'self'"],
|
||||
'script-src': ["'self'", "'unsafe-inline'"],
|
||||
'style-src': ["'self'", "'unsafe-inline'"],
|
||||
'img-src': ["'self'", 'data:', 'https:'],
|
||||
'font-src': ["'self'"],
|
||||
'connect-src': ["'self'"],
|
||||
'frame-ancestors': ["'none'"],
|
||||
'form-action': ["'self'"]
|
||||
},
|
||||
// Request validation config
|
||||
MAX_URL_LENGTH: 2048,
|
||||
MAX_BODY_SIZE: '50kb',
|
||||
// Rate limiting config
|
||||
RATE_LIMIT: {
|
||||
windowMs: 15 * 60 * 1000,
|
||||
max: 50
|
||||
},
|
||||
AUTH_RATE_LIMIT: {
|
||||
windowMs: 15 * 60 * 1000,
|
||||
max: 3
|
||||
}
|
||||
};
|
||||
|
||||
export class SecurityMiddleware {
|
||||
private static app: express.Express;
|
||||
private static requestCounts: Map<string, { count: number, resetTime: number }> = new Map();
|
||||
private static authRequestCounts: Map<string, { count: number, resetTime: number }> = new Map();
|
||||
|
||||
static initialize(app: express.Express): void {
|
||||
this.app = app;
|
||||
|
||||
// Body parser middleware with size limit
|
||||
app.use(express.json({
|
||||
limit: SECURITY_CONFIG.MAX_BODY_SIZE
|
||||
}));
|
||||
|
||||
// Error handling middleware for body-parser errors
|
||||
app.use((error: any, _req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
if (error) {
|
||||
return res.status(413).json({
|
||||
error: true,
|
||||
message: 'Request body too large'
|
||||
});
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
// Main security middleware
|
||||
app.use((req: Request, res: Response, next: NextFunction) => {
|
||||
try {
|
||||
// Apply security headers
|
||||
SecurityMiddleware.applySecurityHeaders(res);
|
||||
|
||||
// Check rate limits
|
||||
SecurityMiddleware.checkRateLimit(req);
|
||||
|
||||
// Validate request
|
||||
SecurityMiddleware.validateRequest(req);
|
||||
|
||||
// Sanitize input
|
||||
if (req.body) {
|
||||
req.body = SecurityMiddleware.sanitizeInput(req.body);
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (error) {
|
||||
if (error instanceof SecurityError) {
|
||||
res.status(error.statusCode).json({
|
||||
error: true,
|
||||
message: error.message
|
||||
});
|
||||
} else {
|
||||
res.status(500).json({
|
||||
error: true,
|
||||
message: 'Internal server error'
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private static validateRequest(req: Request): void {
|
||||
// Check URL length
|
||||
if (req.originalUrl.length > SECURITY_CONFIG.MAX_URL_LENGTH) {
|
||||
throw new SecurityError('URL too long', 413);
|
||||
}
|
||||
|
||||
// Check content type for POST requests
|
||||
if (req.method === 'POST' && req.headers['content-type'] !== 'application/json') {
|
||||
throw new SecurityError('Content-Type must be application/json', 415);
|
||||
}
|
||||
}
|
||||
|
||||
private static sanitizeInput(input: unknown): unknown {
|
||||
if (typeof input === 'string') {
|
||||
return sanitizeHtml(input, {
|
||||
allowedTags: [],
|
||||
allowedAttributes: {}
|
||||
});
|
||||
} else if (Array.isArray(input)) {
|
||||
return input.map(item => SecurityMiddleware.sanitizeInput(item));
|
||||
} else if (input && typeof input === 'object') {
|
||||
const sanitized: Record<string, unknown> = {};
|
||||
for (const [key, value] of Object.entries(input)) {
|
||||
sanitized[key] = SecurityMiddleware.sanitizeInput(value);
|
||||
}
|
||||
return sanitized;
|
||||
}
|
||||
return input;
|
||||
}
|
||||
|
||||
private static applySecurityHeaders(res: Response): void {
|
||||
// Remove X-Powered-By header
|
||||
res.removeHeader('X-Powered-By');
|
||||
|
||||
// Set security headers
|
||||
res.setHeader('X-Frame-Options', SECURITY_CONFIG.FRAME_OPTIONS);
|
||||
res.setHeader('X-XSS-Protection', SECURITY_CONFIG.XSS_PROTECTION);
|
||||
res.setHeader('X-Content-Type-Options', 'nosniff');
|
||||
res.setHeader('Referrer-Policy', SECURITY_CONFIG.REFERRER_POLICY);
|
||||
res.setHeader('Strict-Transport-Security', `max-age=${SECURITY_CONFIG.HSTS_MAX_AGE}; includeSubDomains; preload`);
|
||||
res.setHeader('X-Permitted-Cross-Domain-Policies', 'none');
|
||||
res.setHeader('Cross-Origin-Embedder-Policy', 'require-corp');
|
||||
res.setHeader('Cross-Origin-Opener-Policy', 'same-origin');
|
||||
res.setHeader('Cross-Origin-Resource-Policy', 'same-origin');
|
||||
res.setHeader('Origin-Agent-Cluster', '?1');
|
||||
|
||||
// Set Content-Security-Policy
|
||||
const cspDirectives = Object.entries(SECURITY_CONFIG.CSP)
|
||||
.map(([key, values]) => `${key} ${values.join(' ')}`)
|
||||
.join('; ');
|
||||
res.setHeader('Content-Security-Policy', cspDirectives);
|
||||
}
|
||||
|
||||
private static checkRateLimit(req: Request): void {
|
||||
const ip = req.ip || req.socket.remoteAddress || 'unknown';
|
||||
const now = Date.now();
|
||||
const isAuth = req.path.startsWith('/auth');
|
||||
const store = isAuth ? SecurityMiddleware.authRequestCounts : SecurityMiddleware.requestCounts;
|
||||
const config = isAuth ? SECURITY_CONFIG.AUTH_RATE_LIMIT : SECURITY_CONFIG.RATE_LIMIT;
|
||||
|
||||
let record = store.get(ip);
|
||||
if (!record || now > record.resetTime) {
|
||||
record = { count: 1, resetTime: now + config.windowMs };
|
||||
} else {
|
||||
record.count++;
|
||||
if (record.count > config.max) {
|
||||
throw new SecurityError(
|
||||
isAuth ? 'Too many authentication requests' : 'Too many requests',
|
||||
429
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
store.set(ip, record);
|
||||
}
|
||||
|
||||
// For testing purposes
|
||||
public static clearRateLimits(): void {
|
||||
SecurityMiddleware.requestCounts.clear();
|
||||
SecurityMiddleware.authRequestCounts.clear();
|
||||
}
|
||||
}
|
||||
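A minimal wiring sketch for this middleware, assuming it is applied during Express app setup (the route shown is illustrative):

import express from 'express';
import { SecurityMiddleware } from './security/enhanced-middleware.js';

const app = express();

// Installs the JSON body-size limit, security headers, rate limiting,
// request validation and input sanitization on every route.
SecurityMiddleware.initialize(app);

app.post('/api/mcp/execute', (req, res) => {
  // req.body has already been sanitized by the middleware at this point.
  res.json({ result: req.body });
});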
341  src/sse/__tests__/sse.features.test.ts  (new file)
@@ -0,0 +1,341 @@
|
||||
import { SSEManager } from "../index";
|
||||
import type { SSEClient } from "../index";
|
||||
import type { HassEntity, HassEvent } from "../../interfaces/hass";
|
||||
import { TokenManager } from "../../security/index";
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
mock,
|
||||
Mock,
|
||||
test,
|
||||
} from "bun:test";
|
||||
|
||||
describe("SSE Core Features", () => {
|
||||
let sseManager: SSEManager;
|
||||
const TEST_IP = "127.0.0.1";
|
||||
const validToken = "valid_token_that_meets_minimum_length_requirement_123456";
|
||||
let validateTokenMock: Mock<(token: string, ip?: string) => { valid: boolean; error?: string }>;
|
||||
|
||||
beforeEach(() => {
|
||||
sseManager = new SSEManager({
|
||||
pingInterval: 100, // Shorter interval for testing
|
||||
cleanupInterval: 200,
|
||||
maxConnectionAge: 1000,
|
||||
});
|
||||
|
||||
// Mock token validation to always succeed with our test token
|
||||
validateTokenMock = mock((token: string) => ({
|
||||
valid: token === validToken,
|
||||
error: token !== validToken ? "Invalid token" : undefined,
|
||||
}));
|
||||
TokenManager.validateToken = validateTokenMock;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
validateTokenMock.mockReset();
|
||||
});
|
||||
|
||||
function createTestClient(
|
||||
id: string,
|
||||
): Omit<SSEClient, "authenticated" | "subscriptions" | "rateLimit"> {
|
||||
return {
|
||||
id,
|
||||
ip: TEST_IP,
|
||||
connectedAt: new Date(),
|
||||
connectionTime: Date.now(),
|
||||
send: mock((data: string) => { }),
|
||||
};
|
||||
}
|
||||
|
||||
describe("State Management", () => {
|
||||
it("should track and update entity states", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
const entityId = "light.living_room";
|
||||
const initialState: HassEntity = {
|
||||
entity_id: entityId,
|
||||
state: "off",
|
||||
attributes: { brightness: 0 },
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
};
|
||||
|
||||
// Update state
|
||||
sseManager.updateEntityState(entityId, initialState);
|
||||
|
||||
// Subscribe client to entity
|
||||
sseManager.subscribeToEntity(sseClient.id, entityId);
|
||||
|
||||
// Verify initial state was sent
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(1);
|
||||
const sentData = JSON.parse(sendMock.mock.calls[0]?.[0]);
|
||||
expect(sentData.type).toBe("state_changed");
|
||||
expect(sentData.data.entity_id).toBe(entityId);
|
||||
expect(sentData.data.state).toBe("off");
|
||||
});
|
||||
|
||||
it("should handle state updates and notify subscribers", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
const entityId = "light.living_room";
|
||||
sseManager.subscribeToEntity(sseClient.id, entityId);
|
||||
|
||||
// Update state multiple times
|
||||
const states: HassEntity[] = [
|
||||
{
|
||||
entity_id: entityId,
|
||||
state: "off",
|
||||
attributes: { brightness: 0 },
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
},
|
||||
{
|
||||
entity_id: entityId,
|
||||
state: "on",
|
||||
attributes: { brightness: 100 },
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
},
|
||||
{
|
||||
entity_id: entityId,
|
||||
state: "on",
|
||||
attributes: { brightness: 50 },
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
},
|
||||
];
|
||||
|
||||
for (const state of states) {
|
||||
sseManager.updateEntityState(entityId, state);
|
||||
}
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(states.length);
|
||||
|
||||
// Verify last state
|
||||
const lastSentData = JSON.parse(sendMock.mock.calls[2]?.[0]);
|
||||
expect(lastSentData.data.state).toBe("on");
|
||||
expect(lastSentData.data.attributes.brightness).toBe(50);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Domain Subscriptions", () => {
|
||||
it("should handle domain-wide subscriptions", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
const domain = "light";
|
||||
sseManager.subscribeToDomain(sseClient.id, domain);
|
||||
|
||||
// Update states for multiple entities in the domain
|
||||
const entities = ["light.living_room", "light.kitchen", "light.bedroom"];
|
||||
for (const entityId of entities) {
|
||||
sseManager.updateEntityState(entityId, {
|
||||
entity_id: entityId,
|
||||
state: "on",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
});
|
||||
}
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(entities.length);
|
||||
|
||||
// Verify non-domain entities don't trigger updates
|
||||
sseManager.updateEntityState("switch.fan", {
|
||||
entity_id: "switch.fan",
|
||||
state: "on",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
});
|
||||
|
||||
expect(sendMock.mock.calls.length).toBe(entities.length); // Should not increase
|
||||
});
|
||||
});
|
||||
|
||||
describe("Connection Maintenance", () => {
|
||||
it("should send periodic pings to keep connections alive", async () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
// Wait for ping interval
|
||||
await new Promise((resolve) => setTimeout(resolve, 150));
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBeGreaterThanOrEqual(1);
|
||||
|
||||
const pingData = JSON.parse(sendMock.mock.calls[0]?.[0]);
|
||||
expect(pingData.type).toBe("ping");
|
||||
expect(pingData.timestamp).toBeTruthy();
|
||||
});
|
||||
|
||||
it("should cleanup inactive connections", async () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
// Simulate connection age exceeding limit
|
||||
sseClient.connectedAt = new Date(Date.now() - 2000); // Older than maxConnectionAge
|
||||
|
||||
// Wait for cleanup interval
|
||||
await new Promise((resolve) => setTimeout(resolve, 250));
|
||||
|
||||
// Client should be removed
|
||||
expect(sseManager.getStatistics().totalClients).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handling", () => {
|
||||
it("should handle client send errors gracefully", async () => {
|
||||
const client = createTestClient("test-client");
|
||||
const errorMock = mock(() => {
|
||||
console.log("Mock send function throwing error");
|
||||
throw new Error("Send failed");
|
||||
});
|
||||
client.send = errorMock;
|
||||
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
if (!sseClient) {
|
||||
throw new Error("Failed to add client");
|
||||
}
|
||||
|
||||
// Subscribe to entity to ensure we get updates
|
||||
sseManager.subscribeToEntity(sseClient.id, "light.test");
|
||||
|
||||
// Get initial client count
|
||||
const initialCount = sseManager.getStatistics().totalClients;
|
||||
console.log(`Initial client count: ${initialCount}`);
|
||||
|
||||
// Attempt to send message
|
||||
sseManager.updateEntityState("light.test", {
|
||||
entity_id: "light.test",
|
||||
state: "on",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
});
|
||||
|
||||
// Wait for error handling to complete
|
||||
await new Promise(resolve => setTimeout(resolve, 50));
|
||||
|
||||
// Verify error was thrown
|
||||
expect(errorMock).toHaveBeenCalled();
|
||||
|
||||
// Get final client count
|
||||
const finalCount = sseManager.getStatistics().totalClients;
|
||||
console.log(`Final client count: ${finalCount}`);
|
||||
|
||||
// Client should be removed due to send failure
|
||||
expect(finalCount).toBe(0);
|
||||
});
|
||||
|
||||
it("should handle invalid entity updates", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
// Subscribe to entity
|
||||
const entityId = "light.test";
|
||||
sseManager.subscribeToEntity(sseClient.id, entityId);
|
||||
|
||||
// Update with invalid state
|
||||
const invalidState = {
|
||||
entity_id: entityId,
|
||||
state: undefined,
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
} as unknown as HassEntity;
|
||||
|
||||
sseManager.updateEntityState(entityId, invalidState);
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(0); // Should not send invalid state
|
||||
});
|
||||
});
|
||||
|
||||
describe("Memory Management", () => {
|
||||
it("should limit the number of stored entity states", () => {
|
||||
// Create many entities
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
sseManager.updateEntityState(`test.entity_${i}`, {
|
||||
entity_id: `test.entity_${i}`,
|
||||
state: "on",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
});
|
||||
}
|
||||
|
||||
// Check that stored states are within reasonable limits
|
||||
expect(Object.keys(sseManager["entityStates"]).length).toBeLessThanOrEqual(1000);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Concurrent Operations", () => {
|
||||
it("should handle multiple simultaneous subscriptions", () => {
|
||||
// Create and add clients
|
||||
const rawClients = Array.from({ length: 5 }, (_, i) => createTestClient(`client_${i}`));
|
||||
const clients = rawClients
|
||||
.map(client => sseManager.addClient(client, validToken))
|
||||
.filter((client): client is SSEClient => client !== null);
|
||||
|
||||
expect(clients.length).toBe(5);
|
||||
|
||||
// Subscribe all clients to same entity
|
||||
const entityId = "light.test";
|
||||
clients.forEach(client => {
|
||||
sseManager.subscribeToEntity(client.id, entityId);
|
||||
});
|
||||
|
||||
// Update entity state
|
||||
sseManager.updateEntityState(entityId, {
|
||||
entity_id: entityId,
|
||||
state: "on",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
context: { id: "test_context" },
|
||||
});
|
||||
|
||||
// Verify all clients received update
|
||||
rawClients.forEach(client => {
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Future test cases to implement
|
||||
test.todo("should handle reconnection attempts with exponential backoff");
|
||||
test.todo("should properly clean up resources when client disconnects");
|
||||
test.todo("should handle message queuing when client temporarily disconnects");
|
||||
test.todo("should validate message format before sending to clients");
|
||||
test.todo("should handle client subscription to multiple domains");
|
||||
test.todo("should properly handle client unsubscribe requests");
|
||||
test.todo("should enforce per-domain rate limits");
|
||||
test.todo("should handle large numbers of concurrent state updates");
|
||||
test.todo("should maintain message order for each client");
|
||||
test.todo("should handle client authentication timeout");
|
||||
});
|
||||
114  src/sse/index.ts
@@ -250,6 +250,22 @@ export class SSEManager extends EventEmitter {
|
||||
|
||||
client.subscriptions.add(`domain:${domain}`);
|
||||
console.log(`Client ${clientId} subscribed to domain: ${domain}`);
|
||||
|
||||
// Send current states for all entities in domain
|
||||
this.entityStates.forEach((state, entityId) => {
|
||||
if (entityId.startsWith(`${domain}.`) && !this.isRateLimited(client)) {
|
||||
this.sendToClient(client, {
|
||||
type: "state_changed",
|
||||
data: {
|
||||
entity_id: state.entity_id,
|
||||
state: state.state,
|
||||
attributes: state.attributes,
|
||||
last_changed: state.last_changed,
|
||||
last_updated: state.last_updated,
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
subscribeToEvent(clientId: string, eventType: string): void {
|
||||
@@ -320,68 +336,66 @@ export class SSEManager extends EventEmitter {
|
||||
});
|
||||
}
|
||||
|
||||
private sendToClient(client: SSEClient, data: unknown): void {
|
||||
try {
|
||||
if (!client.authenticated) {
|
||||
console.warn(
|
||||
`Attempted to send message to unauthenticated client ${client.id}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.isRateLimited(client)) {
|
||||
console.warn(`Rate limit exceeded for client ${client.id}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const message = typeof data === "string" ? data : JSON.stringify(data);
|
||||
client.send(message);
|
||||
this.updateRateLimit(client);
|
||||
} catch (error) {
|
||||
console.error(`Failed to send message to client ${client.id}:`, error);
|
||||
this.removeClient(client.id);
|
||||
updateEntityState(entityId: string, state: HassEntity): void {
|
||||
if (!state || typeof state.state === 'undefined') {
|
||||
console.warn(`Invalid state update for entity ${entityId}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Update state in memory
|
||||
this.entityStates.set(entityId, state);
|
||||
|
||||
// Notify subscribed clients
|
||||
this.clients.forEach((client) => {
|
||||
if (!client.authenticated || this.isRateLimited(client)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [domain] = entityId.split('.');
|
||||
if (
|
||||
client.subscriptions.has(`entity:${entityId}`) ||
|
||||
client.subscriptions.has(`domain:${domain}`)
|
||||
) {
|
||||
this.sendToClient(client, {
|
||||
type: "state_changed",
|
||||
data: {
|
||||
entity_id: state.entity_id,
|
||||
state: state.state,
|
||||
attributes: state.attributes,
|
||||
last_changed: state.last_changed,
|
||||
last_updated: state.last_updated,
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getStatistics(): {
|
||||
totalClients: number;
|
||||
authenticatedClients: number;
|
||||
clientStats: ClientStats[];
|
||||
subscriptionStats: { [key: string]: number };
|
||||
} {
|
||||
const now = Date.now();
|
||||
const clientStats: ClientStats[] = [];
|
||||
const subscriptionStats: { [key: string]: number } = {};
|
||||
let authenticatedClients = 0;
|
||||
|
||||
getStatistics(): { totalClients: number; authenticatedClients: number } {
|
||||
let authenticatedCount = 0;
|
||||
this.clients.forEach((client) => {
|
||||
if (client.authenticated) {
|
||||
authenticatedClients++;
|
||||
authenticatedCount++;
|
||||
}
|
||||
|
||||
clientStats.push({
|
||||
id: client.id,
|
||||
ip: client.ip,
|
||||
connectedAt: client.connectedAt,
|
||||
lastPingAt: client.lastPingAt,
|
||||
subscriptionCount: client.subscriptions.size,
|
||||
connectionDuration: now - client.connectedAt.getTime(),
|
||||
messagesSent: client.rateLimit.count,
|
||||
lastActivity: new Date(client.rateLimit.lastReset),
|
||||
});
|
||||
|
||||
client.subscriptions.forEach((sub) => {
|
||||
subscriptionStats[sub] = (subscriptionStats[sub] || 0) + 1;
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
totalClients: this.clients.size,
|
||||
authenticatedClients,
|
||||
clientStats,
|
||||
subscriptionStats,
|
||||
authenticatedClients: authenticatedCount,
|
||||
};
|
||||
}
|
||||
|
||||
private sendToClient(client: SSEClient, data: any): void {
|
||||
try {
|
||||
console.log(`Attempting to send data to client ${client.id}`);
|
||||
client.send(JSON.stringify(data));
|
||||
this.updateRateLimit(client);
|
||||
} catch (error) {
|
||||
console.error(`Failed to send data to client ${client.id}:`, error);
|
||||
console.log(`Removing client ${client.id} due to send error`);
|
||||
this.removeClient(client.id);
|
||||
console.log(`Client count after removal: ${this.clients.size}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export const sseManager = SSEManager.getInstance();
|
||||
|
||||
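A sketch of the subscription flow exercised by the tests above, with the client shape and method signatures assumed from that test file:

import { sseManager } from './sse/index.js';

const client = sseManager.addClient(
  {
    id: 'client-1',
    ip: '127.0.0.1',
    connectedAt: new Date(),
    connectionTime: Date.now(),
    send: (data: string) => process.stdout.write(data + '\n'),
  },
  process.env.JWT_TOKEN ?? '',
);

if (client) {
  // Receive state_changed messages for every light.* entity.
  sseManager.subscribeToDomain(client.id, 'light');
}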
188  src/stdio-server.ts  (new file)
@@ -0,0 +1,188 @@
|
||||
/**
|
||||
* MCP Server with stdio transport
|
||||
*
|
||||
* This module provides a standalone MCP server that communicates
|
||||
* over standard input/output using JSON-RPC 2.0 protocol.
|
||||
*/
|
||||
|
||||
// Only force silent logging if not in Cursor compatibility mode
|
||||
if (!process.env.CURSOR_COMPATIBLE) {
|
||||
process.env.LOG_LEVEL = 'silent';
|
||||
}
|
||||
|
||||
import { createStdioServer, BaseTool } from "./mcp/index.js";
|
||||
import { z } from "zod";
|
||||
import { logger } from "./utils/logger.js";
|
||||
import { MCPContext } from "./mcp/types.js";
|
||||
|
||||
// Import Home Assistant tools
|
||||
import { LightsControlTool } from './tools/homeassistant/lights.tool.js';
|
||||
import { ClimateControlTool } from './tools/homeassistant/climate.tool.js';
|
||||
|
||||
// Check for Cursor compatibility mode
|
||||
const isCursorMode = process.env.CURSOR_COMPATIBLE === 'true';
|
||||
// Use silent startup except in Cursor mode
|
||||
const silentStartup = !isCursorMode;
|
||||
const debugMode = process.env.DEBUG_STDIO === 'true';
|
||||
|
||||
// Configure raw I/O handling if necessary
|
||||
if (isCursorMode) {
|
||||
// Ensure stdout doesn't buffer for Cursor
|
||||
process.stdout.setDefaultEncoding('utf8');
|
||||
// Only try to set raw mode if it's a TTY and the method exists
|
||||
if (process.stdout.isTTY && typeof (process.stdout as any).setRawMode === 'function') {
|
||||
(process.stdout as any).setRawMode(true);
|
||||
}
|
||||
}
|
||||
|
||||
// Send a notification directly to stdout for compatibility
|
||||
function sendNotification(method: string, params: any): void {
|
||||
const notification = {
|
||||
jsonrpc: '2.0',
|
||||
method,
|
||||
params
|
||||
};
|
||||
const message = JSON.stringify(notification) + '\n';
|
||||
process.stdout.write(message);
|
||||
|
||||
// For Cursor mode, ensure messages are flushed if method exists
|
||||
if (isCursorMode && typeof (process.stdout as any).flush === 'function') {
|
||||
(process.stdout as any).flush();
|
||||
}
|
||||
}
|
||||
|
||||
// Create system tools
|
||||
class InfoTool extends BaseTool {
|
||||
constructor() {
|
||||
super({
|
||||
name: "system_info",
|
||||
description: "Get information about the Home Assistant MCP server",
|
||||
parameters: z.object({}).optional(),
|
||||
metadata: {
|
||||
category: "system",
|
||||
version: "1.0.0",
|
||||
tags: ["system", "info"]
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
execute(_params: any, _context: MCPContext): any {
|
||||
return {
|
||||
version: "1.0.0",
|
||||
name: "Home Assistant MCP Server",
|
||||
mode: "stdio",
|
||||
transport: "json-rpc-2.0",
|
||||
features: ["streaming", "middleware", "validation"],
|
||||
timestamp: new Date().toISOString(),
|
||||
homeAssistant: {
|
||||
available: true,
|
||||
toolCount: 2,
|
||||
toolNames: ["lights_control", "climate_control"]
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
async function main() {
|
||||
try {
|
||||
// Create system tools
|
||||
const systemTools = [
|
||||
new InfoTool()
|
||||
];
|
||||
|
||||
// Create Home Assistant tools
|
||||
const haTools = [
|
||||
new LightsControlTool(),
|
||||
new ClimateControlTool()
|
||||
];
|
||||
|
||||
// Combine all tools
|
||||
const allTools = [...systemTools, ...haTools];
|
||||
|
||||
// Send initial notifications BEFORE server initialization for Cursor compatibility
|
||||
// Send system info
|
||||
sendNotification('system.info', {
|
||||
name: 'Home Assistant Model Context Protocol Server',
|
||||
version: '1.0.0',
|
||||
transport: 'stdio',
|
||||
protocol: 'json-rpc-2.0',
|
||||
features: ['streaming'],
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
// Send available tools
|
||||
const toolDefinitions = allTools.map(tool => ({
|
||||
name: tool.name,
|
||||
description: tool.description,
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: []
|
||||
},
|
||||
metadata: tool.metadata
|
||||
}));
|
||||
|
||||
sendNotification('tools.available', {
|
||||
tools: toolDefinitions
|
||||
});
|
||||
|
||||
// Create server with stdio transport
|
||||
const { server, transport } = createStdioServer({
|
||||
silent: silentStartup,
|
||||
debug: debugMode,
|
||||
tools: allTools
|
||||
});
|
||||
|
||||
// Explicitly set the server reference to ensure access to tools
|
||||
if ('setServer' in transport && typeof transport.setServer === 'function') {
|
||||
transport.setServer(server);
|
||||
}
|
||||
|
||||
// Start the server after initial notifications
|
||||
await server.start();
|
||||
|
||||
// In Cursor mode, send notifications again after startup
|
||||
if (isCursorMode) {
|
||||
// Small delay to ensure all messages are processed
|
||||
setTimeout(() => {
|
||||
// Send system info again
|
||||
sendNotification('system.info', {
|
||||
name: 'Home Assistant Model Context Protocol Server',
|
||||
version: '1.0.0',
|
||||
transport: 'stdio',
|
||||
protocol: 'json-rpc-2.0',
|
||||
features: ['streaming'],
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
// Send available tools again
|
||||
sendNotification('tools.available', {
|
||||
tools: toolDefinitions
|
||||
});
|
||||
}, 100);
|
||||
}
|
||||
|
||||
// Handle process exit
|
||||
process.on('SIGINT', async () => {
|
||||
await server.shutdown();
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
process.on('SIGTERM', async () => {
|
||||
await server.shutdown();
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
// Keep process alive
|
||||
process.stdin.resume();
|
||||
} catch (error) {
|
||||
logger.error("Error starting Home Assistant MCP stdio server:", error);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
// Run the main function
|
||||
main().catch(error => {
|
||||
logger.error("Uncaught error:", error);
|
||||
process.exit(1);
|
||||
});
|
||||
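A sketch of driving the stdio transport from a parent process; only the outbound notifications are visible in this diff, so the request method name used below ('tools/list') is an assumption:

import { spawn } from 'node:child_process';

const server = spawn('bun', ['run', 'src/stdio-server.ts'], {
  stdio: ['pipe', 'pipe', 'inherit'],
});

// Newline-delimited JSON-RPC messages; the system.info and tools.available
// notifications arrive first.
server.stdout.on('data', (chunk) => process.stdout.write(chunk));

// Send a JSON-RPC 2.0 request over stdin.
server.stdin.write(JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'tools/list' }) + '\n');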
242  src/tools/base-tool.ts  (new file)
@@ -0,0 +1,242 @@
|
||||
/**
|
||||
* Base Tool Class
|
||||
*
|
||||
* This abstract class provides common functionality for all tools,
|
||||
* including parameter validation, execution context, error handling,
|
||||
* and support for streaming responses.
|
||||
*/
|
||||
|
||||
import { z } from "zod";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import {
|
||||
ToolDefinition,
|
||||
ToolMetadata,
|
||||
MCPContext,
|
||||
MCPStreamPart,
|
||||
MCPErrorCode
|
||||
} from "../mcp/types.js";
|
||||
|
||||
/**
|
||||
* Abstract base class for all tools
|
||||
*/
|
||||
export abstract class BaseTool implements ToolDefinition {
|
||||
public name: string;
|
||||
public description: string;
|
||||
public parameters?: z.ZodType<any>;
|
||||
public returnType?: z.ZodType<any>;
|
||||
public metadata?: ToolMetadata;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*/
|
||||
constructor(props: {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters?: z.ZodType<any>;
|
||||
returnType?: z.ZodType<any>;
|
||||
metadata?: Partial<ToolMetadata>;
|
||||
}) {
|
||||
this.name = props.name;
|
||||
this.description = props.description;
|
||||
this.parameters = props.parameters;
|
||||
this.returnType = props.returnType;
|
||||
|
||||
// Set default metadata
|
||||
this.metadata = {
|
||||
category: "general",
|
||||
version: "1.0.0",
|
||||
...props.metadata
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Main execute method to be implemented by subclasses
|
||||
*/
|
||||
public abstract execute(params: any, context: MCPContext): Promise<any>;
|
||||
|
||||
/**
|
||||
* Validate parameters against schema
|
||||
*/
|
||||
protected validateParams(params: any): any {
|
||||
if (!this.parameters) {
|
||||
return params;
|
||||
}
|
||||
|
||||
try {
|
||||
return this.parameters.parse(params);
|
||||
} catch (error) {
|
||||
throw {
|
||||
code: MCPErrorCode.VALIDATION_ERROR,
|
||||
message: `Invalid parameters for tool '${this.name}'`,
|
||||
data: error
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate result against schema
|
||||
*/
|
||||
protected validateResult(result: any): any {
|
||||
if (!this.returnType) {
|
||||
return result;
|
||||
}
|
||||
|
||||
try {
|
||||
return this.returnType.parse(result);
|
||||
} catch (error) {
|
||||
throw {
|
||||
code: MCPErrorCode.VALIDATION_ERROR,
|
||||
message: `Invalid result from tool '${this.name}'`,
|
||||
data: error
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a streaming response part
|
||||
*/
|
||||
protected sendStreamPart(data: any, context: MCPContext, isFinal: boolean = false): void {
|
||||
// Get requestId from context
|
||||
const { requestId, server } = context;
|
||||
|
||||
// Get active transports with streaming support
|
||||
const streamingTransports = Array.from(server["transports"])
|
||||
.filter(transport => !!transport.sendStreamPart);
|
||||
|
||||
if (streamingTransports.length === 0) {
|
||||
context.logger.warn(
|
||||
`Tool '${this.name}' attempted to stream, but no transports support streaming`
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
// Create stream part message
|
||||
const streamPart: MCPStreamPart = {
|
||||
id: requestId,
|
||||
partId: uuidv4(),
|
||||
final: isFinal,
|
||||
data: data
|
||||
};
|
||||
|
||||
// Send to all transports with streaming support
|
||||
for (const transport of streamingTransports) {
|
||||
transport.sendStreamPart(streamPart);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a streaming executor wrapper
|
||||
*/
|
||||
protected createStreamingExecutor<T>(
|
||||
generator: (params: any, context: MCPContext) => AsyncGenerator<T, T, void>,
|
||||
context: MCPContext
|
||||
): (params: any) => Promise<T> {
|
||||
return async (params: any): Promise<T> => {
|
||||
const validParams = this.validateParams(params);
|
||||
let finalResult: T | undefined = undefined;
|
||||
|
||||
try {
|
||||
const gen = generator(validParams, context);
|
||||
|
||||
for await (const chunk of gen) {
|
||||
// Send intermediate result
|
||||
this.sendStreamPart(chunk, context, false);
|
||||
finalResult = chunk;
|
||||
}
|
||||
|
||||
if (finalResult !== undefined) {
|
||||
// Validate and send final result
|
||||
const validResult = this.validateResult(finalResult);
|
||||
this.sendStreamPart(validResult, context, true);
|
||||
return validResult;
|
||||
}
|
||||
|
||||
throw new Error("Streaming generator did not produce a final result");
|
||||
} catch (error) {
|
||||
context.logger.error(`Error in streaming tool '${this.name}':`, error);
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert tool to SchemaObject format (for Claude and OpenAI)
|
||||
*/
|
||||
public toSchemaObject(): any {
|
||||
// Convert Zod schema to JSON Schema for parameters
|
||||
const parametersSchema = this.parameters ? this.zodToJsonSchema(this.parameters) : {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: []
|
||||
};
|
||||
|
||||
return {
|
||||
name: this.name,
|
||||
description: this.description,
|
||||
parameters: parametersSchema
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Zod schema to JSON Schema (simplified)
|
||||
*/
|
||||
private zodToJsonSchema(schema: z.ZodType<any>): any {
|
||||
// This is a simplified conversion - in production you'd want a full implementation
|
||||
// or use a library like zod-to-json-schema
|
||||
|
||||
// Basic implementation just to support our needs
|
||||
if (schema instanceof z.ZodObject) {
|
||||
const shape = (schema as any)._def.shape();
|
||||
const properties: Record<string, any> = {};
|
||||
const required: string[] = [];
|
||||
|
||||
for (const [key, value] of Object.entries(shape)) {
|
||||
// Add to required array if the field is required
|
||||
if (!(value instanceof z.ZodOptional)) {
|
||||
required.push(key);
|
||||
}
|
||||
|
||||
// Convert property - explicitly cast value to ZodType to fix linter error
|
||||
properties[key] = this.zodTypeToJsonType(value as z.ZodType<any>);
|
||||
}
|
||||
|
||||
return {
|
||||
type: "object",
|
||||
properties,
|
||||
required: required.length > 0 ? required : undefined
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback for other schema types
|
||||
return { type: "object" };
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert Zod type to JSON Schema type (simplified)
|
||||
*/
|
||||
private zodTypeToJsonType(zodType: z.ZodType<any>): any {
|
||||
if (zodType instanceof z.ZodString) {
|
||||
return { type: "string" };
|
||||
} else if (zodType instanceof z.ZodNumber) {
|
||||
return { type: "number" };
|
||||
} else if (zodType instanceof z.ZodBoolean) {
|
||||
return { type: "boolean" };
|
||||
} else if (zodType instanceof z.ZodArray) {
|
||||
return {
|
||||
type: "array",
|
||||
items: this.zodTypeToJsonType((zodType as any)._def.type)
|
||||
};
|
||||
} else if (zodType instanceof z.ZodEnum) {
|
||||
return {
|
||||
type: "string",
|
||||
enum: (zodType as any)._def.values
|
||||
};
|
||||
} else if (zodType instanceof z.ZodOptional) {
|
||||
return this.zodTypeToJsonType((zodType as any)._def.innerType);
|
||||
} else if (zodType instanceof z.ZodObject) {
|
||||
return this.zodToJsonSchema(zodType);
|
||||
}
|
||||
|
||||
return { type: "object" };
|
||||
}
|
||||
}
|
||||
168  src/tools/example.tool.ts  (new file)
@@ -0,0 +1,168 @@
|
||||
/**
|
||||
* Example Tool Implementation
|
||||
*
|
||||
* This file demonstrates how to create tools using the new BaseTool class,
|
||||
* including streaming responses and parameter validation.
|
||||
*/
|
||||
|
||||
import { z } from "zod";
|
||||
import { BaseTool } from "../mcp/index.js";
|
||||
import { MCPContext } from "../mcp/types.js";
|
||||
|
||||
/**
|
||||
* Example streaming tool that generates a series of responses
|
||||
*/
|
||||
export class StreamGeneratorTool extends BaseTool {
|
||||
constructor() {
|
||||
super({
|
||||
name: "stream_generator",
|
||||
description: "Generate a stream of data with configurable delay and count",
|
||||
parameters: z.object({
|
||||
count: z.number().int().min(1).max(20).default(5)
|
||||
.describe("Number of items to generate (1-20)"),
|
||||
delay: z.number().int().min(100).max(2000).default(500)
|
||||
.describe("Delay in ms between items (100-2000)"),
|
||||
prefix: z.string().optional().default("Item")
|
||||
.describe("Optional prefix for item labels")
|
||||
}),
|
||||
metadata: {
|
||||
category: "examples",
|
||||
version: "1.0.0",
|
||||
tags: ["streaming", "demo"],
|
||||
isStreaming: true
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute method that demonstrates streaming capabilities
|
||||
*/
|
||||
async execute(params: {
|
||||
count: number;
|
||||
delay: number;
|
||||
prefix: string;
|
||||
}, context: MCPContext): Promise<any> {
|
||||
// Create streaming executor from generator function
|
||||
const streamingExecutor = this.createStreamingExecutor(
|
||||
this.generateItems.bind(this),
|
||||
context
|
||||
);
|
||||
|
||||
// Execute with validated parameters
|
||||
return streamingExecutor(params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generator function that produces stream parts
|
||||
*/
|
||||
private async *generateItems(params: {
|
||||
count: number;
|
||||
delay: number;
|
||||
prefix: string;
|
||||
}, context: MCPContext): AsyncGenerator<any, any, void> {
|
||||
const { count, delay, prefix } = params;
|
||||
const results = [];
|
||||
|
||||
// Helper function to create a delay
|
||||
const sleep = (ms: number) => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
// Generate items with delay
|
||||
for (let i = 1; i <= count; i++) {
|
||||
// Sleep to simulate async work
|
||||
await sleep(delay);
|
||||
|
||||
// Create an item
|
||||
const item = {
|
||||
id: i,
|
||||
label: `${prefix} ${i}`,
|
||||
timestamp: new Date().toISOString(),
|
||||
progress: Math.round((i / count) * 100)
|
||||
};
|
||||
|
||||
results.push(item);
|
||||
|
||||
// Yield current results for streaming
|
||||
yield {
|
||||
items: [...results],
|
||||
completed: i,
|
||||
total: count,
|
||||
progress: Math.round((i / count) * 100)
|
||||
};
|
||||
}
|
||||
|
||||
// Final result - this will also be returned from the execute method
|
||||
return {
|
||||
items: results,
|
||||
completed: count,
|
||||
total: count,
|
||||
progress: 100,
|
||||
finished: true
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Example tool that validates complex input
|
||||
*/
|
||||
export class ValidationDemoTool extends BaseTool {
|
||||
constructor() {
|
||||
super({
|
||||
name: "validation_demo",
|
||||
description: "Demonstrates parameter validation with Zod schemas",
|
||||
parameters: z.object({
|
||||
user: z.object({
|
||||
name: z.string().min(2).max(50),
|
||||
email: z.string().email(),
|
||||
age: z.number().int().min(13).optional()
|
||||
}).describe("User information"),
|
||||
preferences: z.object({
|
||||
theme: z.enum(["light", "dark", "system"]).default("system"),
|
||||
notifications: z.boolean().default(true)
|
||||
}).optional().describe("User preferences"),
|
||||
tags: z.array(z.string()).min(1).max(5).optional()
|
||||
.describe("Optional list of tags (1-5)")
|
||||
}),
|
||||
metadata: {
|
||||
category: "examples",
|
||||
version: "1.0.0",
|
||||
tags: ["validation", "demo"]
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute method that demonstrates parameter validation
|
||||
*/
|
||||
async execute(params: {
|
||||
user: {
|
||||
name: string;
|
||||
email: string;
|
||||
age?: number;
|
||||
},
|
||||
preferences?: {
|
||||
theme: "light" | "dark" | "system";
|
||||
notifications: boolean;
|
||||
},
|
||||
tags?: string[];
|
||||
}, context: MCPContext): Promise<any> {
|
||||
// We don't need to validate here since the BaseTool does it for us
|
||||
// This just demonstrates how validated parameters look
|
||||
|
||||
// Access validated and defaulted parameters
|
||||
const { user, preferences, tags } = params;
|
||||
|
||||
// Wait to simulate async processing
|
||||
await new Promise(resolve => setTimeout(resolve, 500));
|
||||
|
||||
// Return validated data with additional information
|
||||
return {
|
||||
validated: true,
|
||||
timestamp: new Date().toISOString(),
|
||||
requestId: context.requestId,
|
||||
user,
|
||||
preferences: preferences || { theme: "system", notifications: true },
|
||||
tags: tags || [],
|
||||
message: `Hello ${user.name}, your validation was successful!`
|
||||
};
|
||||
}
|
||||
}
|
||||
115  src/tools/examples/stream-generator.tool.ts  (new file)
@@ -0,0 +1,115 @@
|
||||
/**
|
||||
* Example Tool: Stream Generator
|
||||
*
|
||||
* This tool demonstrates how to implement streaming functionality in MCP tools.
|
||||
* It generates a stream of data that can be consumed by clients in real-time.
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
import { BaseTool } from '../../mcp/BaseTool.js';
|
||||
import { MCPResponseStream } from '../../mcp/types.js';
|
||||
|
||||
// Schema for the stream generator parameters
|
||||
const streamGeneratorSchema = z.object({
|
||||
count: z.number().int().min(1).max(100).default(10)
|
||||
.describe('Number of items to generate in the stream (1-100)'),
|
||||
|
||||
delay: z.number().int().min(100).max(2000).default(500)
|
||||
.describe('Delay between items in milliseconds (100-2000)'),
|
||||
|
||||
includeTimestamp: z.boolean().default(false)
|
||||
.describe('Whether to include timestamp with each streamed item'),
|
||||
|
||||
failAfter: z.number().int().min(0).default(0)
|
||||
.describe('If greater than 0, fail after this many items (for error handling testing)')
|
||||
});
|
||||
|
||||
// Define the parameter and result types
|
||||
type StreamGeneratorParams = z.infer<typeof streamGeneratorSchema>;
|
||||
type StreamGeneratorResult = {
|
||||
message: string;
|
||||
count: number;
|
||||
timestamp?: string;
|
||||
items: string[];
|
||||
};
|
||||
|
||||
/**
|
||||
* A tool that demonstrates streaming capabilities by generating a stream of data
|
||||
* with configurable parameters for count, delay, and error scenarios.
|
||||
*/
|
||||
export class StreamGeneratorTool extends BaseTool<StreamGeneratorParams, StreamGeneratorResult> {
|
||||
constructor() {
|
||||
super({
|
||||
name: 'stream_generator',
|
||||
description: 'Generates a stream of data with configurable delay and count',
|
||||
version: '1.0.0',
|
||||
parameters: streamGeneratorSchema,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the tool and stream results back to the client
|
||||
*/
|
||||
async execute(
|
||||
params: StreamGeneratorParams,
|
||||
stream?: MCPResponseStream
|
||||
): Promise<StreamGeneratorResult> {
|
||||
const { count, delay, includeTimestamp, failAfter } = params;
|
||||
const items: string[] = [];
|
||||
|
||||
// If we have a stream, use it to send intermediate results
|
||||
if (stream) {
|
||||
for (let i = 1; i <= count; i++) {
|
||||
// Simulate a processing delay
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
|
||||
// Check if we should fail for testing error handling
|
||||
if (failAfter > 0 && i > failAfter) {
|
||||
throw new Error(`Intentional failure after ${failAfter} items (for testing)`);
|
||||
}
|
||||
|
||||
const item = `Item ${i} of ${count}`;
|
||||
items.push(item);
|
||||
|
||||
// Create the intermediate result
|
||||
const partialResult: Partial<StreamGeneratorResult> = {
|
||||
message: `Generated ${i} of ${count} items`,
|
||||
count: i,
|
||||
items: [...items]
|
||||
};
|
||||
|
||||
// Add timestamp if requested
|
||||
if (includeTimestamp) {
|
||||
partialResult.timestamp = new Date().toISOString();
|
||||
}
|
||||
|
||||
// Stream the intermediate result
|
||||
stream.write(partialResult);
|
||||
}
|
||||
} else {
|
||||
// No streaming, generate all items at once with delay between
|
||||
for (let i = 1; i <= count; i++) {
|
||||
await new Promise(resolve => setTimeout(resolve, delay));
|
||||
|
||||
if (failAfter > 0 && i > failAfter) {
|
||||
throw new Error(`Intentional failure after ${failAfter} items (for testing)`);
|
||||
}
|
||||
|
||||
items.push(`Item ${i} of ${count}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Return the final result
|
||||
const result: StreamGeneratorResult = {
|
||||
message: `Successfully generated ${count} items`,
|
||||
count,
|
||||
items
|
||||
};
|
||||
|
||||
if (includeTimestamp) {
|
||||
result.timestamp = new Date().toISOString();
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
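As a rough usage sketch (not part of the diff): the tool above can be driven directly with a stand-in stream object. Only the write(partial) call shown in the file is assumed about MCPResponseStream, and the import path is illustrative.

import { StreamGeneratorTool } from './src/tools/examples/stream-generator.tool.js';

// Hypothetical stand-in for MCPResponseStream: only the write() method used above is assumed.
const fakeStream = {
  write: (partial: unknown) => console.error('partial:', JSON.stringify(partial)),
};

const tool = new StreamGeneratorTool();
// Streams three intermediate results, 100 ms apart, then resolves with the final result.
const result = await tool.execute(
  { count: 3, delay: 100, includeTimestamp: true, failAfter: 0 },
  fakeStream as any,
);
console.error(result.message); // "Successfully generated 3 items"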
92  src/tools/examples/validation-demo.tool.ts  Normal file
@@ -0,0 +1,92 @@
/**
 * Example Tool: Validation Demo
 *
 * This tool demonstrates how to implement validation using Zod schemas
 * in MCP tools. It provides examples of different validation rules and
 * how they can be applied to tool parameters.
 */

import { z } from 'zod';
import { BaseTool } from '../../mcp/BaseTool.js';

// Define a complex schema with various validation rules
const validationDemoSchema = z.object({
  // String validations
  email: z.string().email()
    .describe('An email address to validate'),

  url: z.string().url().optional()
    .describe('Optional URL to validate'),

  // Number validations
  age: z.number().int().min(18).max(120)
    .describe('Age (must be between 18-120)'),

  score: z.number().min(0).max(100).default(50)
    .describe('Score from 0-100'),

  // Array validations
  tags: z.array(z.string().min(2).max(20))
    .min(1).max(5)
    .describe('Between 1-5 tags, each 2-20 characters'),

  // Enum validations
  role: z.enum(['admin', 'user', 'guest'])
    .describe('User role (admin, user, or guest)'),

  // Object validations
  preferences: z.object({
    theme: z.enum(['light', 'dark', 'system']).default('system')
      .describe('UI theme preference'),
    notifications: z.boolean().default(true)
      .describe('Whether to enable notifications'),
    language: z.string().default('en')
      .describe('Preferred language code')
  }).optional()
    .describe('Optional user preferences')
});

// Define types based on the schema
type ValidationDemoParams = z.infer<typeof validationDemoSchema>;
type ValidationDemoResult = {
  valid: boolean;
  message: string;
  validatedData: ValidationDemoParams;
  metadata: {
    fieldsValidated: string[];
    timestamp: string;
  };
};

/**
 * A tool that demonstrates parameter validation using Zod schemas
 */
export class ValidationDemoTool extends BaseTool<ValidationDemoParams, ValidationDemoResult> {
  constructor() {
    super({
      name: 'validation_demo',
      description: 'Demonstrates parameter validation using Zod schemas',
      version: '1.0.0',
      parameters: validationDemoSchema,
    });
  }

  /**
   * Execute the validation demo tool
   */
  async execute(params: ValidationDemoParams): Promise<ValidationDemoResult> {
    // Get all field names that were validated
    const fieldsValidated = Object.keys(params);

    // Process the validated data (in a real tool, this would do something useful)
    return {
      valid: true,
      message: 'All parameters successfully validated',
      validatedData: params,
      metadata: {
        fieldsValidated,
        timestamp: new Date().toISOString()
      }
    };
  }
}
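For reference, a minimal sketch of the Zod behaviour the schema above relies on: .default() fills omitted fields during parse, and invalid input is rejected before execute() ever runs. This uses the standard zod API, trimmed to two fields for brevity.

import { z } from 'zod';

// Trimmed-down version of validationDemoSchema, for illustration only.
const demoSchema = z.object({
  email: z.string().email(),
  score: z.number().min(0).max(100).default(50),
});

// Defaults are applied for omitted fields:
console.log(demoSchema.parse({ email: 'user@example.com' }));
// -> { email: 'user@example.com', score: 50 }

// Invalid input never reaches the tool:
const bad = demoSchema.safeParse({ email: 'not-an-email' });
console.log(bad.success); // false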
403  src/tools/homeassistant/climate.tool.ts  Normal file
@@ -0,0 +1,403 @@
/**
 * Climate Control Tool for Home Assistant
 *
 * This tool allows controlling climate devices (thermostats, AC units, etc.)
 * in Home Assistant through the MCP. It supports modes, temperature settings,
 * and fan modes.
 */

import { z } from "zod";
import { BaseTool } from "../base-tool.js";
import { logger } from "../../utils/logger.js";
import { MCPContext } from "../../mcp/types.js";

// Mock Home Assistant API service in absence of actual HA integration
class MockHAClimateService {
  private climateDevices: Map<string, {
    state: "on" | "off";
    hvac_mode: "off" | "heat" | "cool" | "auto" | "dry" | "fan_only";
    temperature?: number;
    target_temp_high?: number;
    target_temp_low?: number;
    fan_mode?: "auto" | "low" | "medium" | "high";
    friendly_name: string;
    supported_features: string[];
    current_temperature?: number;
    humidity?: number;
  }>;

  constructor() {
    // Initialize with some mock climate devices
    this.climateDevices = new Map([
      ["climate.living_room", {
        state: "on",
        hvac_mode: "cool",
        temperature: 72,
        fan_mode: "auto",
        friendly_name: "Living Room Thermostat",
        supported_features: ["target_temperature", "fan_mode"],
        current_temperature: 75
      }],
      ["climate.bedroom", {
        state: "off",
        hvac_mode: "off",
        temperature: 68,
        fan_mode: "low",
        friendly_name: "Bedroom Thermostat",
        supported_features: ["target_temperature", "fan_mode"],
        current_temperature: 70
      }],
      ["climate.kitchen", {
        state: "on",
        hvac_mode: "heat",
        temperature: 70,
        fan_mode: "medium",
        friendly_name: "Kitchen Thermostat",
        supported_features: ["target_temperature", "fan_mode"],
        current_temperature: 68,
        humidity: 45
      }],
      ["climate.office", {
        state: "on",
        hvac_mode: "auto",
        target_temp_high: 78,
        target_temp_low: 70,
        fan_mode: "auto",
        friendly_name: "Office Thermostat",
        supported_features: ["target_temperature_range", "fan_mode"],
        current_temperature: 72,
        humidity: 40
      }]
    ]);
  }

  // Get all climate devices
  public getClimateDevices(): Record<string, unknown>[] {
    const result = [];
    for (const [entity_id, device] of this.climateDevices.entries()) {
      result.push({
        entity_id,
        state: device.state,
        attributes: {
          ...device,
          friendly_name: device.friendly_name
        }
      });
    }
    return result;
  }

  // Get a specific climate device
  public getClimateDevice(entity_id: string): Record<string, unknown> | null {
    const device = this.climateDevices.get(entity_id);
    if (!device) {
      return null;
    }

    return {
      entity_id,
      state: device.state,
      attributes: {
        ...device,
        friendly_name: device.friendly_name
      }
    };
  }

  // Set HVAC mode
  public setHVACMode(entity_id: string, hvac_mode: string): boolean {
    const device = this.climateDevices.get(entity_id);
    if (!device) {
      return false;
    }

    // Validate mode
    if (!["off", "heat", "cool", "auto", "dry", "fan_only"].includes(hvac_mode)) {
      return false;
    }

    // Set mode
    device.hvac_mode = hvac_mode as any;

    // Update state based on mode
    device.state = hvac_mode === "off" ? "off" : "on";

    this.climateDevices.set(entity_id, device);
    return true;
  }

  // Set temperature
  public setTemperature(
    entity_id: string,
    temperature?: number,
    target_temp_high?: number,
    target_temp_low?: number
  ): boolean {
    const device = this.climateDevices.get(entity_id);
    if (!device) {
      return false;
    }

    // Single temperature setting
    if (temperature !== undefined &&
        device.supported_features.includes("target_temperature")) {
      device.temperature = temperature;
    }

    // Temperature range setting
    if (target_temp_high !== undefined &&
        target_temp_low !== undefined &&
        device.supported_features.includes("target_temperature_range")) {
      device.target_temp_high = target_temp_high;
      device.target_temp_low = target_temp_low;
    }

    this.climateDevices.set(entity_id, device);
    return true;
  }

  // Set fan mode
  public setFanMode(entity_id: string, fan_mode: string): boolean {
    const device = this.climateDevices.get(entity_id);
    if (!device) {
      return false;
    }

    // Validate fan mode
    if (!["auto", "low", "medium", "high"].includes(fan_mode)) {
      return false;
    }

    // Check if fan mode is supported
    if (!device.supported_features.includes("fan_mode")) {
      return false;
    }

    // Set fan mode
    device.fan_mode = fan_mode as any;

    this.climateDevices.set(entity_id, device);
    return true;
  }
}

// Singleton instance
const haClimateService = new MockHAClimateService();

// Define the schema for our tool parameters
const climateControlSchema = z.object({
  action: z.enum(["list", "get", "set_hvac_mode", "set_temperature", "set_fan_mode"]).describe("The action to perform"),
  entity_id: z.string().optional().describe("The entity ID of the climate device to control"),
  hvac_mode: z.enum(["off", "heat", "cool", "auto", "dry", "fan_only"]).optional().describe("The HVAC mode to set"),
  temperature: z.number().optional().describe("The target temperature to set"),
  target_temp_high: z.number().optional().describe("The maximum target temperature to set"),
  target_temp_low: z.number().optional().describe("The minimum target temperature to set"),
  fan_mode: z.enum(["auto", "low", "medium", "high"]).optional().describe("The fan mode to set"),
});

type ClimateControlParams = z.infer<typeof climateControlSchema>;

/**
 * Tool for controlling climate devices in Home Assistant
 */
export class ClimateControlTool extends BaseTool {
  constructor() {
    super({
      name: "climate_control",
      description: "Control climate devices in Home Assistant",
      parameters: climateControlSchema,
      metadata: {
        category: "home_assistant",
        version: "1.0.0",
        tags: ["climate", "thermostat", "hvac", "home_assistant"],
        examples: [
          {
            description: "List all climate devices",
            params: { action: "list" }
          },
          {
            description: "Set temperature",
            params: {
              action: "set_temperature",
              entity_id: "climate.living_room",
              temperature: 72
            }
          }
        ]
      }
    });
  }

  /**
   * Execute the tool
   */
  public async execute(params: ClimateControlParams, context: MCPContext): Promise<Record<string, unknown>> {
    logger.debug(`Executing ClimateControlTool with params: ${JSON.stringify(params)}`);

    try {
      // Add an await here to satisfy the linter
      await Promise.resolve();

      switch (params.action) {
        case "list":
          return this.listClimateDevices();

        case "get":
          if (!params.entity_id) {
            throw new Error("entity_id is required for get action");
          }
          return this.getClimateDevice(params.entity_id);

        case "set_hvac_mode":
          if (!params.entity_id) {
            throw new Error("entity_id is required for set_hvac_mode action");
          }
          if (!params.hvac_mode) {
            throw new Error("hvac_mode is required for set_hvac_mode action");
          }
          return this.setHVACMode(params.entity_id, params.hvac_mode);

        case "set_temperature":
          if (!params.entity_id) {
            throw new Error("entity_id is required for set_temperature action");
          }
          if (params.temperature === undefined &&
              (params.target_temp_high === undefined || params.target_temp_low === undefined)) {
            throw new Error("Either temperature or both target_temp_high and target_temp_low are required");
          }
          return this.setTemperature(
            params.entity_id,
            params.temperature,
            params.target_temp_high,
            params.target_temp_low
          );

        case "set_fan_mode":
          if (!params.entity_id) {
            throw new Error("entity_id is required for set_fan_mode action");
          }
          if (!params.fan_mode) {
            throw new Error("fan_mode is required for set_fan_mode action");
          }
          return this.setFanMode(params.entity_id, params.fan_mode);

        default:
          throw new Error(`Unknown action: ${String(params.action)}`);
      }
    } catch (error) {
      logger.error(`Error in ClimateControlTool: ${String(error)}`);
      throw error;
    }
  }

  /**
   * List all climate devices
   */
  private listClimateDevices(): Record<string, unknown> {
    const devices = haClimateService.getClimateDevices();

    return {
      success: true,
      climate_devices: devices,
      count: devices.length
    };
  }

  /**
   * Get a specific climate device
   */
  private getClimateDevice(entity_id: string): Record<string, unknown> {
    const device = haClimateService.getClimateDevice(entity_id);

    if (!device) {
      return {
        success: false,
        error: `Climate device ${entity_id} not found`
      };
    }

    return {
      success: true,
      device
    };
  }

  /**
   * Set HVAC mode
   */
  private setHVACMode(entity_id: string, hvac_mode: string): Record<string, unknown> {
    const success = haClimateService.setHVACMode(entity_id, hvac_mode);

    if (!success) {
      return {
        success: false,
        error: `Failed to set HVAC mode for ${entity_id}: device not found or mode not supported`
      };
    }

    const device = haClimateService.getClimateDevice(entity_id);

    return {
      success: true,
      message: `Set HVAC mode to ${hvac_mode} for ${entity_id}`,
      device
    };
  }

  /**
   * Set temperature
   */
  private setTemperature(
    entity_id: string,
    temperature?: number,
    target_temp_high?: number,
    target_temp_low?: number
  ): Record<string, unknown> {
    const success = haClimateService.setTemperature(
      entity_id,
      temperature,
      target_temp_high,
      target_temp_low
    );

    if (!success) {
      return {
        success: false,
        error: `Failed to set temperature for ${entity_id}: device not found or feature not supported`
      };
    }

    const device = haClimateService.getClimateDevice(entity_id);
    const tempMessage = temperature !== undefined
      ? `temperature to ${temperature}°`
      : `temperature range to ${target_temp_low}° - ${target_temp_high}°`;

    return {
      success: true,
      message: `Set ${tempMessage} for ${entity_id}`,
      device
    };
  }

  /**
   * Set fan mode
   */
  private setFanMode(entity_id: string, fan_mode: string): Record<string, unknown> {
    const success = haClimateService.setFanMode(entity_id, fan_mode);

    if (!success) {
      return {
        success: false,
        error: `Failed to set fan mode for ${entity_id}: device not found or mode not supported`
      };
    }

    const device = haClimateService.getClimateDevice(entity_id);

    return {
      success: true,
      message: `Set fan mode to ${fan_mode} for ${entity_id}`,
      device
    };
  }
}
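A hypothetical direct invocation of the tool above against its in-memory mock service (not part of the diff); the context object is a placeholder since MCPContext's exact shape isn't shown here, and the import path is illustrative.

import { ClimateControlTool } from './src/tools/homeassistant/climate.tool.js';

const climate = new ClimateControlTool();
// Placeholder context: only requestId-style metadata is assumed, nothing more.
const ctx = { requestId: 'demo-1' } as any;

// Raises the living-room setpoint on the MockHAClimateService defined above.
const res = await climate.execute(
  { action: 'set_temperature', entity_id: 'climate.living_room', temperature: 72 },
  ctx,
);
console.log(res.message); // "Set temperature to 72° for climate.living_room"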
327  src/tools/homeassistant/lights.tool.ts  Normal file
@@ -0,0 +1,327 @@
/**
 * Lights Control Tool for Home Assistant
 *
 * This tool allows controlling lights in Home Assistant through the MCP.
 * It supports turning lights on/off, changing brightness, color, and color temperature.
 */

import { z } from "zod";
import { BaseTool } from "../base-tool.js";
import { logger } from "../../utils/logger.js";
import { MCPContext } from "../../mcp/types.js";

// Mock Home Assistant API service in absence of actual HA integration
class MockHALightsService {
  private lights: Map<string, {
    state: "on" | "off";
    brightness?: number;
    color_temp?: number;
    rgb_color?: [number, number, number];
    friendly_name: string;
  }>;

  constructor() {
    // Initialize with some mock lights
    this.lights = new Map([
      ["light.living_room", {
        state: "off",
        brightness: 255,
        friendly_name: "Living Room Light"
      }],
      ["light.kitchen", {
        state: "on",
        brightness: 200,
        friendly_name: "Kitchen Light"
      }],
      ["light.bedroom", {
        state: "off",
        brightness: 150,
        color_temp: 400,
        friendly_name: "Bedroom Light"
      }],
      ["light.office", {
        state: "on",
        brightness: 255,
        rgb_color: [255, 255, 255],
        friendly_name: "Office Light"
      }]
    ]);
  }

  // Get all lights
  public getLights(): Record<string, unknown>[] {
    const result = [];
    for (const [entity_id, light] of this.lights.entries()) {
      result.push({
        entity_id,
        state: light.state,
        attributes: {
          ...light,
          friendly_name: light.friendly_name
        }
      });
    }
    return result;
  }

  // Get a specific light
  public getLight(entity_id: string): Record<string, unknown> | null {
    const light = this.lights.get(entity_id);
    if (!light) {
      return null;
    }

    return {
      entity_id,
      state: light.state,
      attributes: {
        ...light,
        friendly_name: light.friendly_name
      }
    };
  }

  // Turn a light on
  public turnOn(entity_id: string, attributes: Record<string, unknown> = {}): boolean {
    const light = this.lights.get(entity_id);
    if (!light) {
      return false;
    }

    light.state = "on";

    // Apply attributes
    if (typeof attributes.brightness === "number") {
      light.brightness = Math.max(0, Math.min(255, attributes.brightness));
    }

    if (typeof attributes.color_temp === "number") {
      light.color_temp = Math.max(153, Math.min(500, attributes.color_temp));
    }

    if (Array.isArray(attributes.rgb_color) && attributes.rgb_color.length >= 3) {
      // Individually extract and validate each RGB component
      const r = Number(attributes.rgb_color[0]);
      const g = Number(attributes.rgb_color[1]);
      const b = Number(attributes.rgb_color[2]);

      // Only set if we got valid numbers
      if (!isNaN(r) && !isNaN(g) && !isNaN(b)) {
        light.rgb_color = [
          Math.max(0, Math.min(255, r)),
          Math.max(0, Math.min(255, g)),
          Math.max(0, Math.min(255, b))
        ];
      }
    }

    this.lights.set(entity_id, light);
    return true;
  }

  // Turn a light off
  public turnOff(entity_id: string): boolean {
    const light = this.lights.get(entity_id);
    if (!light) {
      return false;
    }

    light.state = "off";
    this.lights.set(entity_id, light);
    return true;
  }
}

// Singleton instance
const haLightsService = new MockHALightsService();

// Define the schema for our tool parameters
const lightsControlSchema = z.object({
  action: z.enum(["list", "get", "turn_on", "turn_off"]).describe("The action to perform"),
  entity_id: z.string().optional().describe("The entity ID of the light to control"),
  brightness: z.number().min(0).max(255).optional().describe("Brightness level (0-255)"),
  color_temp: z.number().min(153).max(500).optional().describe("Color temperature (153-500)"),
  rgb_color: z.tuple([
    z.number().min(0).max(255),
    z.number().min(0).max(255),
    z.number().min(0).max(255)
  ]).optional().describe("RGB color as [r, g, b]"),
});

type LightsControlParams = z.infer<typeof lightsControlSchema>;

/**
 * Tool for controlling lights in Home Assistant
 */
export class LightsControlTool extends BaseTool {
  constructor() {
    super({
      name: "lights_control",
      description: "Control lights in Home Assistant",
      parameters: lightsControlSchema,
      metadata: {
        category: "home_assistant",
        version: "1.0.0",
        tags: ["lights", "home_assistant", "control"],
        examples: [
          {
            description: "List all lights",
            params: { action: "list" }
          },
          {
            description: "Turn on a light with brightness",
            params: {
              action: "turn_on",
              entity_id: "light.living_room",
              brightness: 200
            }
          }
        ]
      }
    });
  }

  /**
   * Execute the tool
   */
  public async execute(params: LightsControlParams, context: MCPContext): Promise<Record<string, unknown>> {
    logger.debug(`Executing LightsControlTool with params: ${JSON.stringify(params)}`);

    try {
      // Add an await here to satisfy the linter
      await Promise.resolve();

      // Pre-declare variables that will be used in the switch statement
      let attributes: Record<string, unknown>;

      switch (params.action) {
        case "list":
          return this.listLights();

        case "get":
          if (!params.entity_id) {
            throw new Error("entity_id is required for get action");
          }
          return this.getLight(params.entity_id);

        case "turn_on":
          if (!params.entity_id) {
            throw new Error("entity_id is required for turn_on action");
          }

          // Initialize attributes outside the case block
          attributes = {};

          if (params.brightness !== undefined) {
            attributes.brightness = params.brightness;
          }

          if (params.color_temp !== undefined) {
            attributes.color_temp = params.color_temp;
          }

          if (params.rgb_color !== undefined) {
            // Ensure the rgb_color is passed correctly
            attributes.rgb_color = [
              params.rgb_color[0],
              params.rgb_color[1],
              params.rgb_color[2]
            ];
          }

          return this.turnOnLight(params.entity_id, attributes);

        case "turn_off":
          if (!params.entity_id) {
            throw new Error("entity_id is required for turn_off action");
          }
          return this.turnOffLight(params.entity_id);

        default:
          throw new Error(`Unknown action: ${String(params.action)}`);
      }
    } catch (error) {
      logger.error(`Error in LightsControlTool: ${String(error)}`);
      throw error;
    }
  }

  /**
   * List all available lights
   */
  private listLights(): Record<string, unknown> {
    const lights = haLightsService.getLights();

    return {
      success: true,
      lights,
      count: lights.length
    };
  }

  /**
   * Get a specific light
   */
  private getLight(entity_id: string): Record<string, unknown> {
    const light = haLightsService.getLight(entity_id);

    if (!light) {
      return {
        success: false,
        error: `Light ${entity_id} not found`
      };
    }

    return {
      success: true,
      light
    };
  }

  /**
   * Turn on a light
   */
  private turnOnLight(
    entity_id: string,
    attributes: Record<string, unknown>
  ): Record<string, unknown> {
    const success = haLightsService.turnOn(entity_id, attributes);

    if (!success) {
      return {
        success: false,
        error: `Failed to turn on ${entity_id}: light not found`
      };
    }

    const light = haLightsService.getLight(entity_id);

    return {
      success: true,
      message: `Turned on ${entity_id}`,
      light
    };
  }

  /**
   * Turn off a light
   */
  private turnOffLight(entity_id: string): Record<string, unknown> {
    const success = haLightsService.turnOff(entity_id);

    if (!success) {
      return {
        success: false,
        error: `Failed to turn off ${entity_id}: light not found`
      };
    }

    const light = haLightsService.getLight(entity_id);

    return {
      success: true,
      message: `Turned off ${entity_id}`,
      light
    };
  }
}
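A sketch of what a client request for this tool could look like over the newline-delimited JSON-RPC transport added later in this change; whether the server dispatches tools under their configured names (here "lights_control") is an assumption, not something this diff shows.

// Hypothetical JSON-RPC 2.0 request, one JSON object per line on the server's stdin.
const turnOn = {
  jsonrpc: '2.0' as const,
  id: 42,
  method: 'lights_control',
  params: { action: 'turn_on', entity_id: 'light.living_room', brightness: 200, rgb_color: [255, 180, 120] },
};
process.stdout.write(JSON.stringify(turnOn) + '\n');
// Expected reply shape (from turnOnLight above):
// {"jsonrpc":"2.0","id":42,"result":{"success":true,"message":"Turned on light.living_room","light":{...}}}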
@@ -1,112 +1,77 @@
/**
 * Logging Module
 *
 * This module provides logging functionality with rotation support.
 * It uses winston for logging and winston-daily-rotate-file for rotation.
 *
 * @module logger
 * Logger Module
 *
 * This module provides a consistent logging interface for all MCP components.
 * It handles log formatting, error handling, and ensures log output is directed
 * to the appropriate destination based on the runtime environment.
 */

import winston from "winston";
import DailyRotateFile from "winston-daily-rotate-file";
import { APP_CONFIG } from "../config/app.config.js";
import winston from 'winston';
import path from 'path';
import fs from 'fs';

/**
 * Log levels configuration
 * Defines the severity levels for logging
 */
const levels = {
  error: 0,
  warn: 1,
  info: 2,
  http: 3,
  debug: 4,
};
// Ensure logs directory exists
const logsDir = path.join(process.cwd(), 'logs');
if (!fs.existsSync(logsDir)) {
  fs.mkdirSync(logsDir, { recursive: true });
}

/**
 * Log level colors configuration
 * Defines colors for different log levels
 */
const colors = {
  error: "red",
  warn: "yellow",
  info: "green",
  http: "magenta",
  debug: "white",
};
// Special handling for stdio mode to ensure stdout stays clean for JSON-RPC
const isStdioMode = process.env.USE_STDIO_TRANSPORT === 'true';
const isDebugStdio = process.env.DEBUG_STDIO === 'true';

/**
 * Add colors to winston
 */
winston.addColors(colors);

/**
 * Log format configuration
 * Defines how log messages are formatted
 */
const format = winston.format.combine(
  winston.format.timestamp({ format: "YYYY-MM-DD HH:mm:ss:ms" }),
  winston.format.colorize({ all: true }),
  winston.format.printf(
    (info) => `${info.timestamp} ${info.level}: ${info.message}`,
  ),
// Create base format that works with TypeScript
const baseFormat = winston.format.combine(
  winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss' }),
  winston.format.errors({ stack: true }),
  winston.format.json()
);

/**
 * Transport for daily rotating file
 * Configures how logs are rotated and stored
 */
const dailyRotateFileTransport = new DailyRotateFile({
  filename: "logs/%DATE%.log",
  datePattern: "YYYY-MM-DD",
  zippedArchive: true,
  maxSize: "20m",
  maxFiles: "14d",
  format: winston.format.combine(
    winston.format.uncolorize(),
    winston.format.timestamp(),
    winston.format.json(),
  ),
});

/**
 * Transport for error logs
 * Stores error logs in a separate file
 */
const errorFileTransport = new DailyRotateFile({
  filename: "logs/error-%DATE%.log",
  datePattern: "YYYY-MM-DD",
  level: "error",
  zippedArchive: true,
  maxSize: "20m",
  maxFiles: "14d",
  format: winston.format.combine(
    winston.format.uncolorize(),
    winston.format.timestamp(),
    winston.format.json(),
  ),
});

/**
 * Create the logger instance
 */
// Create logger with appropriate transports
const logger = winston.createLogger({
  level: APP_CONFIG.NODE_ENV === "development" ? "debug" : "info",
  levels,
  format,
  level: process.env.LOG_LEVEL || 'error',
  format: baseFormat,
  defaultMeta: { service: 'mcp-server' },
  transports: [
    new winston.transports.Console({
    // Always log to files
    new winston.transports.File({ filename: path.join(logsDir, 'error.log'), level: 'error' }),
    new winston.transports.File({ filename: path.join(logsDir, 'combined.log') })
  ]
});

// Handle console output based on environment
if (process.env.NODE_ENV !== 'production' || process.env.CONSOLE_LOGGING === 'true') {
  // In stdio mode with debug enabled, ensure logs only go to stderr to keep stdout clean for JSON-RPC
  if (isStdioMode && isDebugStdio) {
    // Use stderr stream transport in stdio debug mode
    logger.add(new winston.transports.Stream({
      stream: process.stderr,
      format: winston.format.combine(
        winston.format.simple()
      )
    }));
  } else {
    // Use console transport in normal mode
    logger.add(new winston.transports.Console({
      format: winston.format.combine(
        winston.format.colorize(),
        winston.format.simple(),
      ),
    }),
    dailyRotateFileTransport,
    errorFileTransport,
  ],
});
        winston.format.simple()
      )
    }));
  }
}

/**
 * Export the logger instance
 */
// Custom logger interface
export interface MCPLogger {
  debug: (message: string, meta?: Record<string, any>) => void;
  info: (message: string, meta?: Record<string, any>) => void;
  warn: (message: string, meta?: Record<string, any>) => void;
  error: (message: string, meta?: Record<string, any>) => void;
  child: (options: Record<string, any>) => MCPLogger;
}

// Export the winston logger with MCPLogger interface
export { logger };

// Export default logger for convenience
export default logger;
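The point of the reworked logger is that stdout stays reserved for protocol traffic; below is a sketch of the intended separation, assuming the environment variables shown above are set. The import path is illustrative.

import { logger } from './src/utils/logger.js';

// With USE_STDIO_TRANSPORT=true, diagnostics go to logs/combined.log
// (and to stderr when DEBUG_STDIO=true), never to stdout.
logger.info('starting up');

// stdout carries only newline-delimited JSON-RPC messages.
process.stdout.write(JSON.stringify({ jsonrpc: '2.0', id: 0, result: 'ok' }) + '\n');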
339  src/utils/stdio-transport.ts  Normal file
@@ -0,0 +1,339 @@
/**
 * Stdio Transport Module
 *
 * This module implements communication via standard input/output streams
 * using JSON-RPC 2.0 format for sending and receiving messages.
 *
 * @module stdio-transport
 */

import { createInterface } from "readline";
import { logger } from "./logger.js";
import { z } from "zod";

// JSON-RPC 2.0 error codes
export enum JsonRpcErrorCode {
  // Standard JSON-RPC 2.0 error codes
  PARSE_ERROR = -32700,
  INVALID_REQUEST = -32600,
  METHOD_NOT_FOUND = -32601,
  INVALID_PARAMS = -32602,
  INTERNAL_ERROR = -32603,
  // MCP specific error codes
  TOOL_EXECUTION_ERROR = -32000,
  VALIDATION_ERROR = -32001,
}

// Type definitions for JSON-RPC 2.0 messages
export interface JsonRpcRequest {
  jsonrpc: "2.0";
  id: string | number;
  method: string;
  params?: Record<string, unknown>;
}

export interface JsonRpcResponse {
  jsonrpc: "2.0";
  id: string | number;
  result?: unknown;
  error?: JsonRpcError;
}

export interface JsonRpcError {
  code: number;
  message: string;
  data?: unknown;
}

export interface JsonRpcNotification {
  jsonrpc: "2.0";
  method: string;
  params?: Record<string, unknown>;
}

// Setup readline interface for stdin
const rl = createInterface({
  input: process.stdin,
  terminal: false
});

// Message handlers map
const messageHandlers: Map<string, {
  execute: (params: Record<string, unknown>) => Promise<unknown>;
  paramsSchema?: z.ZodType<any>;
}> = new Map();

/**
 * Initialize stdio transport
 * Sets up event listeners and message processing
 */
export function initStdioTransport(): void {
  // Check for silent startup mode
  const silentStartup = process.env.SILENT_STARTUP === 'true';

  // Handle line events (incoming JSON)
  rl.on('line', async (line) => {
    try {
      // Parse incoming JSON
      const request = JSON.parse(line);

      // Validate it's a proper JSON-RPC 2.0 request
      if (!request.jsonrpc || request.jsonrpc !== "2.0") {
        sendErrorResponse({
          id: request.id || null,
          code: JsonRpcErrorCode.INVALID_REQUEST,
          message: "Invalid JSON-RPC 2.0 request: missing or invalid jsonrpc version"
        });
        return;
      }

      // Handle request with ID (requires response)
      if (request.id !== undefined) {
        await handleJsonRpcRequest(request as JsonRpcRequest).catch(err => {
          if (!silentStartup) {
            logger.error(`Error handling request: ${String(err)}`);
          }
        });
      }
      // Handle notification (no response expected)
      else if (request.method) {
        void handleJsonRpcNotification(request as JsonRpcNotification);
      }
      // Invalid request format
      else {
        sendErrorResponse({
          id: null,
          code: JsonRpcErrorCode.INVALID_REQUEST,
          message: "Invalid JSON-RPC 2.0 message format"
        });
      }
    } catch (parseError) {
      // Handle JSON parsing errors
      if (!silentStartup) {
        logger.error(`Failed to parse JSON input: ${String(parseError)}`);
      }
      sendErrorResponse({
        id: null,
        code: JsonRpcErrorCode.PARSE_ERROR,
        message: "Parse error: invalid JSON",
        data: parseError instanceof Error ? parseError.message : String(parseError)
      });
    }
  });

  // Handle stdin close
  rl.on('close', () => {
    if (!silentStartup) {
      logger.info('Stdin closed, shutting down');
    }
    process.exit(0);
  });

  // Log initialization only if not in silent mode
  if (!silentStartup) {
    logger.info("JSON-RPC 2.0 stdio transport initialized");
  }
}

/**
 * Handle a JSON-RPC request that requires a response
 */
async function handleJsonRpcRequest(request: JsonRpcRequest): Promise<void> {
  const { id, method, params = {} } = request;

  // Log to file but not console
  logger.debug(`Received request: ${id} - ${method}`);

  // Look up handler
  const handler = messageHandlers.get(method);
  if (!handler) {
    sendErrorResponse({
      id,
      code: JsonRpcErrorCode.METHOD_NOT_FOUND,
      message: `Method not found: ${method}`
    });
    return;
  }

  try {
    // Validate parameters if schema exists
    if (handler.paramsSchema) {
      try {
        const validationResult = handler.paramsSchema.parse(params);
        // If validation changes values (e.g. default values), use the validated result
        Object.assign(params, validationResult);
      } catch (validationError) {
        sendErrorResponse({
          id,
          code: JsonRpcErrorCode.INVALID_PARAMS,
          message: "Invalid parameters",
          data: validationError instanceof Error ? validationError.message : String(validationError)
        });
        return;
      }
    }

    // Execute handler
    const result = await handler.execute(params);

    // Send successful response
    sendResponse({
      id,
      result
    });
  } catch (error) {
    // Handle execution errors
    sendErrorResponse({
      id,
      code: JsonRpcErrorCode.TOOL_EXECUTION_ERROR,
      message: error instanceof Error ? error.message : String(error),
      data: error
    });
  }
}

/**
 * Handle a JSON-RPC notification (no response required)
 */
async function handleJsonRpcNotification(notification: JsonRpcNotification): Promise<void> {
  const { method, params = {} } = notification;

  // Log to file but not console
  logger.debug(`Received notification: ${method}`);

  // Look up handler
  const handler = messageHandlers.get(method);
  if (!handler) {
    // No response for notifications even if method not found
    logger.warn(`Method not found for notification: ${method}`);
    return;
  }

  try {
    // Validate parameters if schema exists
    if (handler.paramsSchema) {
      try {
        handler.paramsSchema.parse(params);
      } catch (validationError) {
        logger.error(`Invalid parameters for notification ${method}: ${String(validationError)}`);
        return;
      }
    }

    // Execute handler (fire and forget)
    await handler.execute(params);
  } catch (error) {
    // Log execution errors but don't send response
    logger.error(`Error handling notification ${method}: ${String(error)}`);
  }
}

/**
 * Register a message handler for a specific method
 *
 * @param method - The method name to handle
 * @param handler - The function to handle the method
 * @param paramsSchema - Optional Zod schema for parameter validation
 */
export function registerHandler(
  method: string,
  handler: (params: Record<string, unknown>) => Promise<unknown>,
  paramsSchema?: z.ZodType<any>
): void {
  messageHandlers.set(method, {
    execute: handler,
    paramsSchema
  });
  logger.debug(`Registered handler for method: ${method}`);
}

/**
 * Send a successful response to stdout
 *
 * @param options - The response options
 */
export function sendResponse({ id, result }: { id: string | number; result?: unknown }): void {
  const response: JsonRpcResponse = {
    jsonrpc: "2.0",
    id,
    result
  };

  const jsonResponse = JSON.stringify(response);
  process.stdout.write(jsonResponse + '\n');
  logger.debug(`Sent response: ${id}`);
}

/**
 * Send an error response to stdout
 *
 * @param error - The error details
 */
export function sendErrorResponse({
  id,
  code,
  message,
  data
}: {
  id: string | number | null;
  code: number;
  message: string;
  data?: unknown;
}): void {
  const response: JsonRpcResponse = {
    jsonrpc: "2.0",
    id: id ?? null,
    error: {
      code,
      message,
      data
    }
  };

  const jsonResponse = JSON.stringify(response);
  process.stdout.write(jsonResponse + '\n');
  logger.error(`Sent error response: ${id} - [${code}] ${message}`);
}

/**
 * Send a notification to the client (no response expected)
 *
 * @param method - The notification method name
 * @param params - The notification parameters
 */
export function sendNotification(method: string, params?: Record<string, unknown>): void {
  const notification: JsonRpcNotification = {
    jsonrpc: "2.0",
    method,
    params
  };

  const jsonNotification = JSON.stringify(notification);
  process.stdout.write(jsonNotification + '\n');
  logger.debug(`Sent notification: ${method}`);
}

/**
 * Send a log message to the client
 *
 * @param level - The log level (info, warn, error, debug)
 * @param message - The log message
 * @param data - Optional additional data
 */
export function sendLogMessage(level: string, message: string, data?: unknown): void {
  sendNotification("log", {
    level,
    message,
    data,
    timestamp: new Date().toISOString()
  });
}

/**
 * Enable debug mode for the transport
 * Increases logging verbosity
 */
export function enableDebugMode(): void {
  logger.level = "debug";
  logger.info("Debug mode enabled for stdio transport");
}
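A short sketch of how a server entry point might wire a method into this transport using the exported helpers; the "ping" method name is made up for illustration, and the import path is illustrative.

import { z } from 'zod';
import { initStdioTransport, registerHandler, sendLogMessage } from './src/utils/stdio-transport.js';

// Params are validated against the Zod schema before the handler runs.
registerHandler(
  'ping',
  async (params) => ({ pong: true, echo: params.message ?? null }),
  z.object({ message: z.string().optional() })
);

initStdioTransport();
sendLogMessage('info', 'transport ready');
// A client can now send: {"jsonrpc":"2.0","id":1,"method":"ping","params":{"message":"hi"}}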
3  start.sh  Executable file
@@ -0,0 +1,3 @@
#!/bin/bash
export NODE_ENV=development
exec bun --smol run start
97  stdio-start.sh  Executable file
@@ -0,0 +1,97 @@
#!/bin/bash

# MCP Server Stdio Transport Launcher
# This script builds and runs the MCP server using stdin/stdout JSON-RPC 2.0 transport

# ANSI colors for prettier output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[0;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color

# Show usage information
function show_usage {
  echo -e "${BLUE}Usage:${NC} $0 [options]"
  echo
  echo "Options:"
  echo "  --debug     Enable debug mode"
  echo "  --rebuild   Force rebuild even if dist exists"
  echo "  --help      Show this help message"
  echo
  echo "Examples:"
  echo "  $0            # Normal start"
  echo "  $0 --debug    # Start with debug logging"
  echo "  $0 --rebuild  # Force rebuild"
  echo
  echo "This script runs the MCP server with JSON-RPC 2.0 stdio transport."
  echo "Logs will be written to the logs directory but not to stdout."
  echo
}

# Process command line arguments
REBUILD=false
DEBUG=false

for arg in "$@"; do
  case $arg in
    --help)
      show_usage
      exit 0
      ;;
    --debug)
      DEBUG=true
      shift
      ;;
    --rebuild)
      REBUILD=true
      shift
      ;;
    *)
      echo -e "${RED}Unknown option:${NC} $arg"
      show_usage
      exit 1
      ;;
  esac
done

# Check for errors
if [ ! -f ".env" ]; then
  echo -e "${RED}Error:${NC} .env file not found. Please create one from .env.example." >&2
  exit 1
fi

# Set environment variables
export USE_STDIO_TRANSPORT=true

# Set debug mode if requested
if [ "$DEBUG" = true ]; then
  export DEBUG=true
  echo -e "${YELLOW}Debug mode enabled${NC}" >&2
fi

# Check if we need to build
if [ ! -d "dist" ] || [ "$REBUILD" = true ]; then
  echo -e "${BLUE}Building MCP server with stdio transport...${NC}" >&2
  bun build ./src/index.ts --outdir ./dist --target bun || {
    echo -e "${RED}Build failed!${NC}" >&2
    exit 1
  }
else
  echo -e "${GREEN}Using existing build in dist/ directory${NC}" >&2
  echo -e "${YELLOW}Use --rebuild flag to force a rebuild${NC}" >&2
fi

# Create logs directory if it doesn't exist
mkdir -p logs

# Run the application with stdio transport
echo -e "${GREEN}Starting MCP server with stdio transport...${NC}" >&2
echo -e "${YELLOW}Note: All logs will be written to logs/ directory${NC}" >&2
echo -e "${YELLOW}Press Ctrl+C to stop${NC}" >&2

# Execute the server
exec bun run dist/index.js

# The exec replaces this shell with the server process
# so any code after this point will not be executed
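For a quick end-to-end check (a sketch, not part of the diff): the script can be spawned from a small TypeScript driver that writes one request per line and reads responses from stdout; the "ping" method here is hypothetical.

import { spawn } from 'node:child_process';

// Launch the stdio server; stderr is passed through so the colored status messages stay visible.
const server = spawn('./stdio-start.sh', [], { stdio: ['pipe', 'pipe', 'inherit'] });

server.stdout?.on('data', (chunk) => {
  // Each stdout line is one JSON-RPC 2.0 response.
  console.error('response:', chunk.toString().trim());
});

server.stdin?.write(JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'ping', params: {} }) + '\n');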
33  test/setup.ts  Normal file
@@ -0,0 +1,33 @@
import { beforeAll, afterAll } from 'bun:test';

// Mock environment variables for testing
const TEST_ENV = {
  NODE_ENV: 'test',
  PORT: '3000',
  EXECUTION_TIMEOUT: '30000',
  STREAMING_ENABLED: 'false',
  USE_STDIO_TRANSPORT: 'false',
  USE_HTTP_TRANSPORT: 'true',
  DEBUG_MODE: 'false',
  DEBUG_STDIO: 'false',
  DEBUG_HTTP: 'false',
  SILENT_STARTUP: 'false',
  CORS_ORIGIN: '*',
  RATE_LIMIT_MAX_REQUESTS: '100',
  RATE_LIMIT_MAX_AUTH_REQUESTS: '5'
};

beforeAll(() => {
  // Store original environment
  process.env = {
    ...process.env,
    ...TEST_ENV
  };
});

afterAll(() => {
  // Clean up test environment
  Object.keys(TEST_ENV).forEach(key => {
    delete process.env[key];
  });
});
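Assuming this file is registered as the test suite's preload/setup script (an assumption, not shown in the diff), tests can rely on the mocked environment; a minimal sketch:

import { describe, expect, it } from 'bun:test';

describe('test environment', () => {
  it('disables the stdio transport and uses the test defaults', () => {
    expect(process.env.USE_STDIO_TRANSPORT).toBe('false');
    expect(process.env.PORT).toBe('3000');
  });
});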
19  tsconfig.stdio.json  Normal file
@@ -0,0 +1,19 @@
{
  "extends": "./tsconfig.json",
  "compilerOptions": {
    "outDir": "dist",
    "rootDir": "src",
    "sourceMap": true
  },
  "include": [
    "src/stdio-server.ts",
    "src/mcp/**/*.ts",
    "src/utils/**/*.ts",
    "src/tools/homeassistant/**/*.ts"
  ],
  "exclude": [
    "node_modules",
    "dist",
    "**/*.test.ts"
  ]
}