Compare commits
47 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b9727981cc | ||
|
|
e1db799b1d | ||
|
|
905339fb67 | ||
|
|
849b080aba | ||
|
|
f8bbe4af6f | ||
|
|
3a6f79c9a8 | ||
|
|
60f18f8e71 | ||
|
|
47f11b3d95 | ||
|
|
f24be8ff53 | ||
|
|
dfff432321 | ||
|
|
d59bf02d08 | ||
|
|
345a5888d9 | ||
|
|
d6a5771e01 | ||
|
|
5f4ddfbd88 | ||
|
|
c11b40da9e | ||
|
|
3a54766b61 | ||
|
|
8b1948ce30 | ||
|
|
38ee5368d1 | ||
|
|
b0ad1cf0ad | ||
|
|
d6bb83685d | ||
|
|
54112c9059 | ||
|
|
1f79feeccc | ||
|
|
63fd21053c | ||
|
|
5f078ff227 | ||
|
|
5d0c2f54a2 | ||
|
|
02284c787b | ||
|
|
3e97357561 | ||
|
|
cb897d4cf6 | ||
|
|
08e408d68d | ||
|
|
1e3bf07547 | ||
|
|
e503da1dfd | ||
|
|
790a37e49f | ||
|
|
bc1dc8278a | ||
|
|
9a02bdaf11 | ||
|
|
04123a5740 | ||
|
|
e688c94718 | ||
|
|
481dc5b1a8 | ||
|
|
c519d250a1 | ||
|
|
10bf5919e4 | ||
|
|
89f2278c25 | ||
|
|
a53cec7b28 | ||
|
|
b7856e9d05 | ||
|
|
7891115ebe | ||
|
|
a814c427e9 | ||
|
|
840927998e | ||
|
|
cf7fb2422e | ||
|
|
d46a19c698 |
@@ -7,7 +7,6 @@ yarn-error.log*
|
||||
package-lock.json
|
||||
yarn.lock
|
||||
pnpm-lock.yaml
|
||||
bun.lockb
|
||||
|
||||
# Build output
|
||||
dist/
|
||||
@@ -74,3 +73,6 @@ temp/
|
||||
.cloud/
|
||||
*.db
|
||||
*.db-*
|
||||
.cursor/
|
||||
.cursor*
|
||||
.cursorconfig
|
||||
72
.env.example
72
.env.example
@@ -36,26 +36,50 @@ RATE_LIMIT_REGULAR=100
|
||||
# Requests per minute per IP for WebSocket connections
|
||||
RATE_LIMIT_WEBSOCKET=1000
|
||||
|
||||
# Security
|
||||
# JWT secret for token generation (change this in production!)
|
||||
JWT_SECRET=your_jwt_secret_key
|
||||
|
||||
# CORS configuration (comma-separated list of allowed origins)
|
||||
CORS_ORIGINS=http://localhost:3000,http://localhost:8123
|
||||
|
||||
# Test Configuration
|
||||
# Only needed if running tests
|
||||
TEST_HASS_HOST=http://localhost:8123
|
||||
TEST_HASS_TOKEN=test_token
|
||||
TEST_HASS_SOCKET_URL=ws://localhost:8123/api/websocket
|
||||
TEST_PORT=3001
|
||||
|
||||
# Security Configuration
|
||||
JWT_SECRET=your-secret-key
|
||||
# JWT Configuration
|
||||
JWT_SECRET=your_jwt_secret_key_min_32_chars
|
||||
JWT_EXPIRY=86400000
|
||||
JWT_MAX_AGE=2592000000
|
||||
JWT_ALGORITHM=HS256
|
||||
|
||||
# Rate Limiting
|
||||
RATE_LIMIT_WINDOW_MS=900000 # 15 minutes
|
||||
RATE_LIMIT_MAX=100
|
||||
RATE_LIMIT_WINDOW=900000
|
||||
RATE_LIMIT_MAX_REQUESTS=100
|
||||
|
||||
# Token Security
|
||||
TOKEN_MIN_LENGTH=32
|
||||
MAX_FAILED_ATTEMPTS=5
|
||||
LOCKOUT_DURATION=900000
|
||||
|
||||
# CORS Configuration
|
||||
CORS_ORIGINS=http://localhost:3000,http://localhost:8123
|
||||
CORS_METHODS=GET,POST,PUT,DELETE,OPTIONS
|
||||
CORS_ALLOWED_HEADERS=Content-Type,Authorization,X-Requested-With
|
||||
CORS_EXPOSED_HEADERS=
|
||||
CORS_CREDENTIALS=true
|
||||
CORS_MAX_AGE=86400
|
||||
|
||||
# Content Security Policy
|
||||
CSP_ENABLED=true
|
||||
CSP_REPORT_ONLY=false
|
||||
CSP_REPORT_URI=
|
||||
|
||||
# SSL/TLS Configuration
|
||||
REQUIRE_HTTPS=true
|
||||
HSTS_MAX_AGE=31536000
|
||||
HSTS_INCLUDE_SUBDOMAINS=true
|
||||
HSTS_PRELOAD=true
|
||||
|
||||
# Cookie Security
|
||||
COOKIE_SECRET=your_cookie_secret_key_min_32_chars
|
||||
COOKIE_SECURE=true
|
||||
COOKIE_HTTP_ONLY=true
|
||||
COOKIE_SAME_SITE=Strict
|
||||
|
||||
# Request Limits
|
||||
MAX_REQUEST_SIZE=1048576
|
||||
MAX_REQUEST_FIELDS=1000
|
||||
|
||||
# SSE Configuration
|
||||
SSE_MAX_CLIENTS=1000
|
||||
@@ -71,3 +95,17 @@ LOG_REQUESTS=true
|
||||
|
||||
# Version
|
||||
VERSION=0.1.0
|
||||
|
||||
# Test Configuration
|
||||
# Only needed if running tests
|
||||
TEST_HASS_HOST=http://localhost:8123
|
||||
TEST_HASS_TOKEN=test_token
|
||||
TEST_HASS_SOCKET_URL=ws://localhost:8123/api/websocket
|
||||
TEST_PORT=3001
|
||||
|
||||
# Speech Features Configuration
|
||||
ENABLE_SPEECH_FEATURES=false
|
||||
ENABLE_WAKE_WORD=true
|
||||
ENABLE_SPEECH_TO_TEXT=true
|
||||
WHISPER_MODEL_PATH=/models
|
||||
WHISPER_MODEL_TYPE=base
|
||||
64
.github/workflows/deploy-docs.yml
vendored
Normal file
64
.github/workflows/deploy-docs.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
name: Deploy Documentation to GitHub Pages
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'docs/**'
|
||||
- '.github/workflows/deploy-docs.yml'
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pages: write
|
||||
id-token: write
|
||||
|
||||
# Allow only one concurrent deployment
|
||||
concurrency:
|
||||
group: "pages"
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Ruby
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: '3.2'
|
||||
bundler-cache: true
|
||||
cache-version: 0
|
||||
|
||||
- name: Setup Pages
|
||||
uses: actions/configure-pages@v4
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
cd docs
|
||||
bundle install
|
||||
|
||||
- name: Build site
|
||||
run: |
|
||||
cd docs
|
||||
bundle exec jekyll build
|
||||
env:
|
||||
JEKYLL_ENV: production
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-pages-artifact@v3
|
||||
with:
|
||||
path: docs/_site
|
||||
|
||||
deploy:
|
||||
needs: build
|
||||
environment:
|
||||
name: github-pages
|
||||
url: ${{ steps.deployment.outputs.page_url }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Deploy to GitHub Pages
|
||||
id: deployment
|
||||
uses: actions/deploy-pages@v4
|
||||
65
.github/workflows/docker-build-push.yml
vendored
Normal file
65
.github/workflows/docker-build-push.yml
vendored
Normal file
@@ -0,0 +1,65 @@
|
||||
name: Docker Build and Push
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ "main" ]
|
||||
tags:
|
||||
- 'v*.*.*' # Triggers on version tags like v1.0.0
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
build-and-push:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 0 # Required for version detection
|
||||
|
||||
- name: Bump version and push tag
|
||||
id: tag_version
|
||||
uses: mathieudutour/github-tag-action@v6.1
|
||||
with:
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
default_bump: patch
|
||||
|
||||
- name: Create Release
|
||||
uses: actions/create-release@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ steps.tag_version.outputs.new_tag }}
|
||||
release_name: Release ${{ steps.tag_version.outputs.new_tag }}
|
||||
body: ${{ steps.tag_version.outputs.changelog }}
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
tags: |
|
||||
type=raw,value=${{ steps.tag_version.outputs.new_tag }}
|
||||
type=raw,value=latest
|
||||
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
32
.github/workflows/docs-deploy.yml
vendored
Normal file
32
.github/workflows/docs-deploy.yml
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
name: Deploy Documentation
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
paths:
|
||||
- 'docs/**'
|
||||
- 'mkdocs.yml'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
deploy-docs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: 3.x
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
pip install mkdocs-material
|
||||
pip install mkdocs
|
||||
|
||||
- name: Deploy documentation
|
||||
run: mkdocs gh-deploy --force
|
||||
17
.gitignore
vendored
17
.gitignore
vendored
@@ -65,7 +65,6 @@ home-assistant_v2.db-*
|
||||
package-lock.json
|
||||
yarn.lock
|
||||
pnpm-lock.yaml
|
||||
bun.lockb
|
||||
|
||||
coverage/*
|
||||
coverage/
|
||||
@@ -73,3 +72,19 @@ coverage/
|
||||
.env
|
||||
.env.*
|
||||
!.env.*.template
|
||||
|
||||
.cursor/
|
||||
.cursor/*
|
||||
|
||||
.bun/
|
||||
.cursorconfig
|
||||
bun.lockb
|
||||
|
||||
# MkDocs
|
||||
site/
|
||||
.site/
|
||||
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
74
Dockerfile
74
Dockerfile
@@ -1,20 +1,70 @@
|
||||
# Use Bun as the base image
|
||||
FROM oven/bun:1.0.26
|
||||
# Use Node.js as base for building
|
||||
FROM node:20-slim as builder
|
||||
|
||||
# Set working directory
|
||||
WORKDIR /app
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
# Install bun
|
||||
RUN npm install -g bun@1.0.25
|
||||
|
||||
# Install dependencies
|
||||
RUN bun install
|
||||
# Install only the minimal dependencies needed and clean up in the same layer
|
||||
RUN apt-get update && apt-get install -y --no-install-recommends \
|
||||
ca-certificates \
|
||||
curl \
|
||||
&& rm -rf /var/lib/apt/lists/* \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/cache/apt/*
|
||||
|
||||
# Build TypeScript
|
||||
RUN bun run build
|
||||
# Set build-time environment variables
|
||||
ENV NODE_ENV=production \
|
||||
NODE_OPTIONS="--max-old-space-size=2048" \
|
||||
BUN_INSTALL_CACHE=0
|
||||
|
||||
# Expose the port the app runs on
|
||||
EXPOSE 3000
|
||||
# Copy only package files first
|
||||
COPY package.json ./
|
||||
|
||||
# Start the application
|
||||
CMD ["bun", "run", "start"]
|
||||
# Install dependencies with a clean slate
|
||||
RUN rm -rf node_modules .bun bun.lockb && \
|
||||
bun install --no-save
|
||||
|
||||
# Copy source files and build
|
||||
COPY src ./src
|
||||
COPY tsconfig*.json ./
|
||||
RUN bun build ./src/index.ts --target=bun --minify --outdir=./dist
|
||||
|
||||
# Create a smaller production image
|
||||
FROM node:20-slim as runner
|
||||
|
||||
# Install bun in production image
|
||||
RUN npm install -g bun@1.0.25
|
||||
|
||||
# Set production environment variables
|
||||
ENV NODE_ENV=production \
|
||||
NODE_OPTIONS="--max-old-space-size=1024"
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --system --gid 1001 nodejs && \
|
||||
adduser --system --uid 1001 bunjs
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Copy only the necessary files from builder
|
||||
COPY --from=builder --chown=bunjs:nodejs /app/dist ./dist
|
||||
COPY --from=builder --chown=bunjs:nodejs /app/node_modules ./node_modules
|
||||
COPY --chown=bunjs:nodejs package.json ./
|
||||
|
||||
# Create logs directory with proper permissions
|
||||
RUN mkdir -p /app/logs && chown -R bunjs:nodejs /app/logs
|
||||
|
||||
# Switch to non-root user
|
||||
USER bunjs
|
||||
|
||||
# Health check
|
||||
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
|
||||
CMD curl -f http://localhost:4000/health || exit 1
|
||||
|
||||
# Expose port
|
||||
EXPOSE 4000
|
||||
|
||||
# Start the application with optimized flags
|
||||
CMD ["bun", "--smol", "run", "start"]
|
||||
582
README.md
582
README.md
@@ -1,400 +1,288 @@
|
||||
# Model Context Protocol Server for Home Assistant
|
||||
# 🚀 MCP Server for Home Assistant - Bringing AI-Powered Smart Homes to Life!
|
||||
|
||||
The server uses the MCP protocol to share access to a local Home Assistant instance with an LLM application.
|
||||
[](LICENSE)
|
||||
[](https://bun.sh)
|
||||
[](https://www.typescriptlang.org)
|
||||
[](#)
|
||||
[](https://jango-blockchained.github.io/homeassistant-mcp/)
|
||||
[](https://www.docker.com)
|
||||
|
||||
A powerful bridge between your Home Assistant instance and Language Learning Models (LLMs), enabling natural language control and monitoring of your smart home devices through the Model Context Protocol (MCP). This server provides a comprehensive API for managing your entire Home Assistant ecosystem, from device control to system administration.
|
||||
---
|
||||
|
||||

|
||||

|
||||

|
||||

|
||||

|
||||

|
||||
## Overview 🌐
|
||||
|
||||
## Features
|
||||
Welcome to the **Model Context Protocol (MCP) Server for Home Assistant**! This robust platform bridges Home Assistant with cutting-edge Language Learning Models (LLMs), enabling natural language interactions and real-time automation of your smart devices. Imagine entering your home, saying:
|
||||
|
||||
- 🎮 **Device Control**: Control any Home Assistant device through natural language
|
||||
- 🔄 **Real-time Updates**: Get instant updates through Server-Sent Events (SSE)
|
||||
- 🤖 **Automation Management**: Create, update, and manage automations
|
||||
- 📊 **State Monitoring**: Track and query device states
|
||||
- 🔐 **Secure**: Token-based authentication and rate limiting
|
||||
- 📱 **Mobile Ready**: Works with any HTTP-capable client
|
||||
> “Hey MCP, dim the lights and start my evening playlist,”
|
||||
|
||||
## Real-time Updates with SSE
|
||||
and watching your home transform instantly—that's the magic that MCP Server delivers!
|
||||
|
||||
The server includes a powerful Server-Sent Events (SSE) system that provides real-time updates from your Home Assistant instance. This allows you to:
|
||||
---
|
||||
|
||||
- 🔄 Get instant state changes for any device
|
||||
- 📡 Monitor automation triggers and executions
|
||||
- 🎯 Subscribe to specific domains or entities
|
||||
- 📊 Track service calls and script executions
|
||||
## Key Benefits ✨
|
||||
|
||||
### Quick SSE Example
|
||||
### 🎮 Device Control & Monitoring
|
||||
- **Voice-Controlled Automation:**
|
||||
Use simple commands like "Turn on the kitchen lights" or "Set the thermostat to 22°C" without touching a switch.
|
||||
**Real-World Example:**
|
||||
In the morning, say "Good morning! Open the blinds and start the coffee machine" to kickstart your day automatically.
|
||||
|
||||
- **Real-Time Communication:**
|
||||
Experience sub-100ms latency updates via Server-Sent Events (SSE) or WebSocket connections, ensuring your dashboard is always current.
|
||||
**Real-World Example:**
|
||||
Monitor energy usage instantly during peak hours and adjust remotely for efficient consumption.
|
||||
|
||||
- **Seamless Automation:**
|
||||
Create scene-based rules to synchronize multiple devices effortlessly.
|
||||
**Real-World Example:**
|
||||
For movie nights, have MCP dim the lights, adjust the sound system, and launch your favorite streaming app with just one command.
|
||||
|
||||
### 🤖 AI-Powered Enhancements
|
||||
- **Natural Language Processing (NLP):**
|
||||
Convert everyday speech into actionable commands—just say, "Prepare the house for dinner," and MCP will adjust lighting, temperature, and even play soft background music.
|
||||
|
||||
- **Predictive Automation & Suggestions:**
|
||||
Receive proactive recommendations based on usage habits and environmental trends.
|
||||
**Real-World Example:**
|
||||
When home temperature fluctuates unexpectedly, MCP suggests an optimal setting and notifies you immediately.
|
||||
|
||||
- **Anomaly Detection:**
|
||||
Continuously monitor device activity and alert you to unusual behavior, helping prevent malfunctions or potential security breaches.
|
||||
|
||||
---
|
||||
|
||||
## Architectural Overview 🏗
|
||||
|
||||
Our architecture is engineered for performance, scalability, and security. The following Mermaid diagram illustrates the data flow and component interactions:
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph Client
|
||||
A[Client Application<br/>(Web / Mobile / Voice)]
|
||||
end
|
||||
subgraph CDN
|
||||
B[CDN / Cache]
|
||||
end
|
||||
subgraph Server
|
||||
C[Bun Native Server]
|
||||
E[NLP Engine<br/>& Language Processing Module]
|
||||
end
|
||||
subgraph Integration
|
||||
D[Home Assistant<br/>(Devices, Lights, Thermostats)]
|
||||
end
|
||||
|
||||
A -->|HTTP Request| B
|
||||
B -- Cache Miss --> C
|
||||
C -->|Interpret Command| E
|
||||
E -->|Determine Action| D
|
||||
D -->|Return State/Action| C
|
||||
C -->|Response| B
|
||||
B -->|Cached/Processed Response| A
|
||||
```
|
||||
|
||||
Learn more about our architecture in the [Architecture Documentation](docs/architecture.md).
|
||||
|
||||
---
|
||||
|
||||
## Technical Stack 🔧
|
||||
|
||||
Our solution is built on a modern, high-performance stack that powers every feature:
|
||||
|
||||
- **Bun:**
|
||||
A next-generation JavaScript runtime offering rapid startup times, native TypeScript support, and high performance.
|
||||
👉 [Learn about Bun](https://bun.sh)
|
||||
|
||||
- **Bun Native Server:**
|
||||
Utilizes Bun's built-in HTTP server to efficiently process API requests with sub-100ms response times.
|
||||
👉 See the [Installation Guide](docs/getting-started/installation.md) for details.
|
||||
|
||||
- **Natural Language Processing (NLP) & LLM Integration:**
|
||||
Processes and interprets natural language commands using state-of-the-art LLMs and custom NLP modules.
|
||||
👉 Find API usage details in the [API Documentation](docs/api.md).
|
||||
|
||||
- **Home Assistant Integration:**
|
||||
Provides seamless connectivity with Home Assistant, ensuring flawless communication with your smart devices.
|
||||
👉 Refer to the [Usage Guide](docs/usage.md) for more information.
|
||||
|
||||
- **Redis Cache:**
|
||||
Enables rapid data retrieval and session persistence essential for real-time updates.
|
||||
|
||||
- **TypeScript:**
|
||||
Enhances type safety and developer productivity across the entire codebase.
|
||||
|
||||
- **JWT & Security Middleware:**
|
||||
Protects your ecosystem with JWT-based authentication, request sanitization, rate-limiting, and encryption.
|
||||
|
||||
- **Containerization with Docker:**
|
||||
Enables scalable, isolated deployments for production environments.
|
||||
|
||||
For further technical details, check out our [Documentation Index](docs/index.md).
|
||||
|
||||
---
|
||||
|
||||
## Installation 🛠
|
||||
|
||||
### 🐳 Docker Setup (Recommended)
|
||||
|
||||
For a hassle-free, containerized deployment:
|
||||
|
||||
```bash
|
||||
# 1. Clone the repository (using a shallow copy for efficiency)
|
||||
git clone --depth 1 https://github.com/jango-blockchained/homeassistant-mcp.git
|
||||
|
||||
# 2. Configure your environment: copy the example file and edit it with your Home Assistant credentials
|
||||
cp .env.example .env # Modify .env with your Home Assistant host, tokens, etc.
|
||||
|
||||
# 3. Build and run the Docker containers
|
||||
docker compose up -d --build
|
||||
|
||||
# 4. View real-time logs (last 50 log entries)
|
||||
docker compose logs -f --tail=50
|
||||
```
|
||||
|
||||
👉 Refer to our [Installation Guide](docs/getting-started/installation.md) for full details.
|
||||
|
||||
### 💻 Bare Metal Installation
|
||||
|
||||
For direct deployment on your host machine:
|
||||
|
||||
```bash
|
||||
# 1. Install Bun (if not already installed)
|
||||
curl -fsSL https://bun.sh/install | bash
|
||||
|
||||
# 2. Install project dependencies with caching support
|
||||
bun install --frozen-lockfile
|
||||
|
||||
# 3. Launch the server in development mode with hot-reload enabled
|
||||
bun run dev --watch
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Real-World Usage Examples 🔍
|
||||
|
||||
### 📱 Smart Home Dashboard Integration
|
||||
Integrate MCP's real-time updates into your custom dashboard for a dynamic smart home experience:
|
||||
|
||||
```javascript
|
||||
const eventSource = new EventSource(
|
||||
'http://localhost:3000/subscribe_events?token=YOUR_TOKEN&domain=light'
|
||||
);
|
||||
const eventSource = new EventSource('http://localhost:3000/subscribe_events?token=YOUR_TOKEN&domain=light');
|
||||
|
||||
eventSource.onmessage = (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
console.log('Update received:', data);
|
||||
console.log('Real-time update:', data);
|
||||
// Update your UI dashboard, e.g., refresh a light intensity indicator.
|
||||
};
|
||||
```
|
||||
|
||||
See [SSE_API.md](docs/SSE_API.md) for complete documentation of the SSE system.
|
||||
### 🏠 Voice-Activated Control
|
||||
Utilize voice commands to trigger actions with minimal effort:
|
||||
|
||||
## Table of Contents
|
||||
```javascript
|
||||
// Establish a WebSocket connection for real-time command processing
|
||||
const ws = new WebSocket('wss://mcp.yourha.com/ws');
|
||||
|
||||
- [Key Features](#key-features)
|
||||
- [Prerequisites](#prerequisites)
|
||||
- [Installation](#installation)
|
||||
- [Basic Setup](#basic-setup)
|
||||
- [Docker Setup (Recommended)](#docker-setup-recommended)
|
||||
- [Configuration](#configuration)
|
||||
- [Development](#development)
|
||||
- [API Reference](#api-reference)
|
||||
- [OpenAI Integration](#openai-integration)
|
||||
- [Natural Language Integration](#natural-language-integration)
|
||||
- [Troubleshooting](#troubleshooting)
|
||||
- [Project Status](#project-status)
|
||||
- [Contributing](#contributing)
|
||||
- [Resources](#resources)
|
||||
- [License](#license)
|
||||
ws.onmessage = ({ data }) => {
|
||||
const update = JSON.parse(data);
|
||||
if (update.entity_id === 'light.living_room') {
|
||||
console.log('Adjusting living room lighting based on voice command...');
|
||||
// Additional logic to update your UI or trigger further actions can go here.
|
||||
}
|
||||
};
|
||||
|
||||
## Key Features
|
||||
// Simulate processing a voice command
|
||||
function simulateVoiceCommand(command) {
|
||||
console.log("Processing voice command:", command);
|
||||
// Integrate with your actual voice-to-text system as needed.
|
||||
}
|
||||
|
||||
### Core Functionality 🎮
|
||||
- **Smart Device Control**
|
||||
- 💡 **Lights**: Brightness, color temperature, RGB color
|
||||
- 🌡️ **Climate**: Temperature, HVAC modes, fan modes, humidity
|
||||
- 🚪 **Covers**: Position and tilt control
|
||||
- 🔌 **Switches**: On/off control
|
||||
- 🚨 **Sensors & Contacts**: State monitoring
|
||||
- 🎵 **Media Players**: Playback control, volume, source selection
|
||||
- 🌪️ **Fans**: Speed, oscillation, direction
|
||||
- 🔒 **Locks**: Lock/unlock control
|
||||
- 🧹 **Vacuums**: Start, stop, return to base
|
||||
- 📹 **Cameras**: Motion detection, snapshots
|
||||
|
||||
### System Management 🛠️
|
||||
- **Add-on Management**
|
||||
- Browse available add-ons
|
||||
- Install/uninstall add-ons
|
||||
- Start/stop/restart add-ons
|
||||
- Version management
|
||||
- Configuration access
|
||||
|
||||
- **Package Management (HACS)**
|
||||
- Integration with Home Assistant Community Store
|
||||
- Multiple package types support:
|
||||
- Custom integrations
|
||||
- Frontend themes
|
||||
- Python scripts
|
||||
- AppDaemon apps
|
||||
- NetDaemon apps
|
||||
- Version control and updates
|
||||
- Repository management
|
||||
|
||||
- **Automation Management**
|
||||
- Create and edit automations
|
||||
- Advanced configuration options:
|
||||
- Multiple trigger types
|
||||
- Complex conditions
|
||||
- Action sequences
|
||||
- Execution modes
|
||||
- Duplicate and modify existing automations
|
||||
- Enable/disable automation rules
|
||||
- Trigger automation manually
|
||||
|
||||
### Architecture Features 🏗️
|
||||
- **Intelligent Organization**
|
||||
- Area and floor-based device grouping
|
||||
- State monitoring and querying
|
||||
- Smart context awareness
|
||||
- Historical data access
|
||||
|
||||
- **Robust Architecture**
|
||||
- Comprehensive error handling
|
||||
- State validation
|
||||
- Secure API integration
|
||||
- TypeScript type safety
|
||||
- Extensive test coverage
|
||||
|
||||
## Prerequisites
|
||||
|
||||
- **Node.js** 20.10.0 or higher
|
||||
- **NPM** package manager
|
||||
- **Docker Compose** for containerization
|
||||
- Running **Home Assistant** instance
|
||||
- Home Assistant long-lived access token ([How to get token](https://community.home-assistant.io/t/how-to-get-long-lived-access-token/162159))
|
||||
- **HACS** installed for package management features
|
||||
- **Supervisor** access for add-on management
|
||||
|
||||
## Installation
|
||||
|
||||
### Basic Setup
|
||||
|
||||
```bash
|
||||
# Clone the repository
|
||||
git clone https://github.com/jango-blockchained/homeassistant-mcp.git
|
||||
cd homeassistant-mcp
|
||||
|
||||
# Install dependencies
|
||||
npm install
|
||||
|
||||
# Build the project
|
||||
npm run build
|
||||
simulateVoiceCommand("Turn off all the lights for bedtime");
|
||||
```
|
||||
|
||||
### Docker Setup (Recommended)
|
||||
👉 Learn more in our [Usage Guide](docs/usage.md).
|
||||
|
||||
The project includes Docker support for easy deployment and consistent environments across different platforms.
|
||||
---
|
||||
|
||||
## Update Strategy 🔄
|
||||
|
||||
Maintain a seamless operation with zero downtime updates:
|
||||
|
||||
1. **Clone the repository:**
|
||||
```bash
|
||||
git clone https://github.com/jango-blockchained/homeassistant-mcp.git
|
||||
cd homeassistant-mcp
|
||||
```
|
||||
# 1. Pull the latest Docker images
|
||||
docker compose pull
|
||||
|
||||
2. **Configure environment:**
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
Edit the `.env` file with your Home Assistant configuration:
|
||||
```env
|
||||
# Home Assistant Configuration
|
||||
HASS_HOST=http://homeassistant.local:8123
|
||||
HASS_TOKEN=your_home_assistant_token
|
||||
HASS_SOCKET_URL=ws://homeassistant.local:8123/api/websocket
|
||||
|
||||
# Server Configuration
|
||||
PORT=3000
|
||||
NODE_ENV=production
|
||||
DEBUG=false
|
||||
```
|
||||
|
||||
3. **Build and run with Docker Compose:**
|
||||
```bash
|
||||
# Build and start the containers
|
||||
docker compose up -d
|
||||
|
||||
# View logs
|
||||
docker compose logs -f
|
||||
|
||||
# Stop the service
|
||||
docker compose down
|
||||
```
|
||||
|
||||
4. **Verify the installation:**
|
||||
The server should now be running at `http://localhost:3000`. You can check the health endpoint at `http://localhost:3000/health`.
|
||||
|
||||
5. **Update the application:**
|
||||
```bash
|
||||
# Pull the latest changes
|
||||
git pull
|
||||
|
||||
# Rebuild and restart the containers
|
||||
# 2. Rebuild and restart containers smoothly
|
||||
docker compose up -d --build
|
||||
|
||||
# 3. Clean up unused Docker images to free up space
|
||||
docker system prune -f
|
||||
```
|
||||
|
||||
#### Docker Configuration
|
||||
For more details, review our [Troubleshooting & Updates](docs/troubleshooting.md).
|
||||
|
||||
The Docker setup includes:
|
||||
- Multi-stage build for optimal image size
|
||||
- Health checks for container monitoring
|
||||
- Volume mounting for environment configuration
|
||||
- Automatic container restart on failure
|
||||
- Exposed port 3000 for API access
|
||||
---
|
||||
|
||||
#### Docker Compose Environment Variables
|
||||
## Security Features 🔐
|
||||
|
||||
All environment variables can be configured in the `.env` file. The following variables are supported:
|
||||
- `HASS_HOST`: Your Home Assistant instance URL
|
||||
- `HASS_TOKEN`: Long-lived access token for Home Assistant
|
||||
- `HASS_SOCKET_URL`: WebSocket URL for Home Assistant
|
||||
- `PORT`: Server port (default: 3000)
|
||||
- `NODE_ENV`: Environment (production/development)
|
||||
- `DEBUG`: Enable debug mode (true/false)
|
||||
We prioritize the security of your smart home with multiple layers of defense:
|
||||
- **JWT Authentication 🔑:** Secure, token-based API access to prevent unauthorized usage.
|
||||
- **Request Sanitization 🧼:** Automatic filtering and validation of API requests to combat injection attacks.
|
||||
- **Rate Limiting & Fail2Ban 🚫:** Monitors requests to prevent brute force and DDoS attacks.
|
||||
- **End-to-End Encryption 🔒:** Ensures that your commands and data remain private during transmission.
|
||||
|
||||
## Configuration
|
||||
---
|
||||
|
||||
### Environment Variables
|
||||
|
||||
```env
|
||||
# Home Assistant Configuration
|
||||
HASS_HOST=http://homeassistant.local:8123 # Your Home Assistant instance URL
|
||||
HASS_TOKEN=your_home_assistant_token # Long-lived access token
|
||||
HASS_SOCKET_URL=ws://homeassistant.local:8123/api/websocket # WebSocket URL
|
||||
|
||||
# Server Configuration
|
||||
PORT=3000 # Server port (default: 3000)
|
||||
NODE_ENV=production # Environment (production/development)
|
||||
DEBUG=false # Enable debug mode
|
||||
|
||||
# Test Configuration
|
||||
TEST_HASS_HOST=http://localhost:8123 # Test instance URL
|
||||
TEST_HASS_TOKEN=test_token # Test token
|
||||
```
|
||||
|
||||
### Configuration Files
|
||||
|
||||
1. **Development**: Copy `.env.example` to `.env.development`
|
||||
2. **Production**: Copy `.env.example` to `.env.production`
|
||||
3. **Testing**: Copy `.env.example` to `.env.test`
|
||||
|
||||
## Development
|
||||
## Contributing 🤝
|
||||
|
||||
We value community contributions! Here's how you can help improve MCP Server:
|
||||
1. **Fork the Repository 🍴**
|
||||
Create your own copy of the project.
|
||||
2. **Create a Feature Branch 🌿**
|
||||
```bash
|
||||
# Development mode with hot reload
|
||||
npm run dev
|
||||
|
||||
# Build project
|
||||
npm run build
|
||||
|
||||
# Production mode
|
||||
npm run start
|
||||
|
||||
# Run tests
|
||||
npx jest --config=jest.config.cjs
|
||||
|
||||
# Run tests with coverage
|
||||
npx jest --coverage
|
||||
|
||||
# Lint code
|
||||
npm run lint
|
||||
|
||||
# Format code
|
||||
npm run format
|
||||
git checkout -b feature/your-feature-name
|
||||
```
|
||||
|
||||
## API Reference
|
||||
|
||||
For detailed API documentation, please refer to:
|
||||
- [API Documentation](docs/API.md) - Complete API reference
|
||||
- [SSE API Documentation](docs/SSE_API.md) - Server-Sent Events documentation
|
||||
|
||||
## OpenAI Integration
|
||||
|
||||
The server includes powerful AI analysis capabilities powered by OpenAI's GPT-4 model. This feature provides intelligent analysis of your Home Assistant setup through two main modes:
|
||||
|
||||
### 1. Standard Analysis
|
||||
|
||||
Performs a comprehensive system analysis including:
|
||||
- System Overview
|
||||
- Performance Analysis
|
||||
- Security Assessment
|
||||
- Optimization Recommendations
|
||||
- Maintenance Tasks
|
||||
|
||||
3. **Install Dependencies & Run Tests 🧪**
|
||||
```bash
|
||||
# Run standard analysis
|
||||
npm run test:openai
|
||||
# Select option 1 when prompted
|
||||
bun install
|
||||
bun test --coverage
|
||||
```
|
||||
4. **Make Your Changes & Commit 📝**
|
||||
Follow the [Conventional Commits](https://www.conventionalcommits.org) guidelines.
|
||||
5. **Open a Pull Request 🔀**
|
||||
Submit your changes for review.
|
||||
|
||||
### 2. Custom Prompt Analysis
|
||||
Read more in our [Contribution Guidelines](docs/contributing.md).
|
||||
|
||||
Allows you to ask specific questions about your Home Assistant setup. The analysis can include:
|
||||
- Device States
|
||||
- Configuration Details
|
||||
- Active Devices
|
||||
- Device Attributes (brightness, temperature, etc.)
|
||||
---
|
||||
|
||||
```bash
|
||||
# Run custom analysis
|
||||
npm run test:openai
|
||||
# Select option 2 when prompted
|
||||
```
|
||||
## Roadmap & Future Enhancements 🔮
|
||||
|
||||
### Configuration
|
||||
We're continuously evolving MCP Server. Upcoming features include:
|
||||
- **AI Assistant Integration (Q4 2024):**
|
||||
Smarter, context-aware voice commands and personalized automation.
|
||||
- **Predictive Automation (Q1 2025):**
|
||||
Enhanced scheduling capabilities powered by advanced AI.
|
||||
- **Enhanced Security (Q2 2024):**
|
||||
Introduction of multi-factor authentication, advanced monitoring, and rigorous encryption methods.
|
||||
- **Performance Optimizations (Q3 2024):**
|
||||
Reducing latency further, optimizing caching, and improving load balancing.
|
||||
|
||||
To use the OpenAI integration, you need to set up your OpenAI API key in the `.env` file:
|
||||
```env
|
||||
OPENAI_API_KEY=your_openai_api_key
|
||||
```
|
||||
For more details, see our [Roadmap](docs/roadmap.md).
|
||||
|
||||
## Troubleshooting
|
||||
---
|
||||
|
||||
### Common Issues
|
||||
## Community & Support 🌍
|
||||
|
||||
1. **Node.js Version (`toSorted is not a function`)**
|
||||
- **Solution:** Update to Node.js 20.10.0+
|
||||
```bash
|
||||
nvm install 20.10.0
|
||||
nvm use 20.10.0
|
||||
```
|
||||
Your feedback and collaboration are vital! Join our community:
|
||||
- **GitHub Issues:** Report bugs or request features via our [Issues Page](https://github.com/jango-blockchained/homeassistant-mcp/issues).
|
||||
- **Discord & Slack:** Connect with fellow users and developers in real-time.
|
||||
- **Documentation:** Find comprehensive guides on the [MCP Documentation Website](https://jango-blockchained.github.io/homeassistant-mcp/).
|
||||
|
||||
2. **Connection Issues**
|
||||
- Verify Home Assistant is running
|
||||
- Check `HASS_HOST` accessibility
|
||||
- Validate token permissions
|
||||
- Ensure WebSocket connection for real-time updates
|
||||
---
|
||||
|
||||
3. **Add-on Management Issues**
|
||||
- Verify Supervisor access
|
||||
- Check add-on compatibility
|
||||
- Validate system resources
|
||||
## License 📜
|
||||
|
||||
4. **HACS Integration Issues**
|
||||
- Verify HACS installation
|
||||
- Check HACS integration status
|
||||
- Validate repository access
|
||||
This project is licensed under the MIT License. See [LICENSE](LICENSE) for full details.
|
||||
|
||||
5. **Automation Issues**
|
||||
- Verify entity availability
|
||||
- Check trigger conditions
|
||||
- Validate service calls
|
||||
- Monitor execution logs
|
||||
---
|
||||
|
||||
## Project Status
|
||||
|
||||
✅ **Complete**
|
||||
- Entity, Floor, and Area access
|
||||
- Device control (Lights, Climate, Covers, Switches, Contacts)
|
||||
- Add-on management system
|
||||
- Package management through HACS
|
||||
- Advanced automation configuration
|
||||
- Basic state management
|
||||
- Error handling and validation
|
||||
- Docker containerization
|
||||
- Jest testing setup
|
||||
- TypeScript integration
|
||||
- Environment variable management
|
||||
- Home Assistant API integration
|
||||
- Project documentation
|
||||
|
||||
🚧 **In Progress**
|
||||
- WebSocket implementation for real-time updates
|
||||
- Enhanced security features
|
||||
- Tool organization optimization
|
||||
- Performance optimization
|
||||
- Resource context integration
|
||||
- API documentation generation
|
||||
- Multi-platform desktop integration
|
||||
- Advanced error recovery
|
||||
- Custom prompt testing
|
||||
- Enhanced macOS integration
|
||||
- Type safety improvements
|
||||
- Testing coverage expansion
|
||||
|
||||
## Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Implement your changes
|
||||
4. Add tests for new functionality
|
||||
5. Ensure all tests pass
|
||||
6. Submit a pull request
|
||||
|
||||
## Resources
|
||||
|
||||
- [MCP Documentation](https://modelcontextprotocol.io/introduction)
|
||||
- [Home Assistant Docs](https://www.home-assistant.io)
|
||||
- [HA REST API](https://developers.home-assistant.io/docs/api/rest)
|
||||
- [HACS Documentation](https://hacs.xyz)
|
||||
- [TypeScript Documentation](https://www.typescriptlang.org/docs)
|
||||
|
||||
## License
|
||||
|
||||
MIT License - See [LICENSE](LICENSE) file
|
||||
🔋 Batteries included.
|
||||
@@ -1,212 +1,314 @@
|
||||
import { TokenManager, validateRequest, sanitizeInput, errorHandler } from '../../src/security/index.js';
|
||||
import { Request, Response } from 'express';
|
||||
import { TokenManager, validateRequest, sanitizeInput, errorHandler, rateLimiter, securityHeaders } from '../../src/security/index.js';
|
||||
import { mock, describe, it, expect, beforeEach, afterEach } from 'bun:test';
|
||||
import jwt from 'jsonwebtoken';
|
||||
|
||||
const TEST_SECRET = 'test-secret-that-is-long-enough-for-testing-purposes';
|
||||
|
||||
describe('Security Module', () => {
|
||||
beforeEach(() => {
|
||||
process.env.JWT_SECRET = TEST_SECRET;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.JWT_SECRET;
|
||||
});
|
||||
|
||||
describe('TokenManager', () => {
|
||||
const testToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiZXhwIjoxNzE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c';
|
||||
const encryptionKey = 'test_encryption_key';
|
||||
const testToken = 'test-token';
|
||||
const encryptionKey = 'test-encryption-key-that-is-long-enough';
|
||||
|
||||
it('should encrypt and decrypt tokens', () => {
|
||||
const encrypted = TokenManager.encryptToken(testToken, encryptionKey);
|
||||
const decrypted = TokenManager.decryptToken(encrypted, encryptionKey);
|
||||
expect(encrypted).toContain('aes-256-gcm:');
|
||||
|
||||
const decrypted = TokenManager.decryptToken(encrypted, encryptionKey);
|
||||
expect(decrypted).toBe(testToken);
|
||||
});
|
||||
|
||||
it('should validate tokens correctly', () => {
|
||||
expect(TokenManager.validateToken(testToken)).toBe(true);
|
||||
expect(TokenManager.validateToken('invalid_token')).toBe(false);
|
||||
expect(TokenManager.validateToken('')).toBe(false);
|
||||
const validToken = jwt.sign({ data: 'test' }, TEST_SECRET, { expiresIn: '1h' });
|
||||
const result = TokenManager.validateToken(validToken);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle empty tokens', () => {
|
||||
const result = TokenManager.validateToken('');
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Invalid token format');
|
||||
});
|
||||
|
||||
it('should handle expired tokens', () => {
|
||||
const expiredToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiZXhwIjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c';
|
||||
expect(TokenManager.validateToken(expiredToken)).toBe(false);
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const payload = {
|
||||
data: 'test',
|
||||
iat: now - 7200, // 2 hours ago
|
||||
exp: now - 3600 // expired 1 hour ago
|
||||
};
|
||||
const token = jwt.sign(payload, TEST_SECRET);
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Token has expired');
|
||||
});
|
||||
|
||||
it('should handle invalid token format', () => {
|
||||
const result = TokenManager.validateToken('invalid-token');
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Invalid token format');
|
||||
});
|
||||
|
||||
it('should handle missing JWT secret', () => {
|
||||
delete process.env.JWT_SECRET;
|
||||
const payload = { data: 'test' };
|
||||
const token = jwt.sign(payload, 'some-secret');
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('JWT secret not configured');
|
||||
});
|
||||
|
||||
it('should handle rate limiting for failed attempts', () => {
|
||||
const invalidToken = 'x'.repeat(64);
|
||||
const testIp = '127.0.0.1';
|
||||
|
||||
// First attempt
|
||||
const firstResult = TokenManager.validateToken(invalidToken, testIp);
|
||||
expect(firstResult.valid).toBe(false);
|
||||
|
||||
// Multiple failed attempts
|
||||
for (let i = 0; i < 4; i++) {
|
||||
TokenManager.validateToken(invalidToken, testIp);
|
||||
}
|
||||
|
||||
// Next attempt should be rate limited
|
||||
const limitedResult = TokenManager.validateToken(invalidToken, testIp);
|
||||
expect(limitedResult.valid).toBe(false);
|
||||
expect(limitedResult.error).toBe('Too many failed attempts. Please try again later.');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Request Validation', () => {
|
||||
let mockRequest: Partial<Request>;
|
||||
let mockResponse: Partial<Response>;
|
||||
let mockNext: jest.Mock;
|
||||
let mockRequest: any;
|
||||
let mockResponse: any;
|
||||
let mockNext: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockRequest = {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
authorization: 'Bearer validToken'
|
||||
'content-type': 'application/json'
|
||||
},
|
||||
is: jest.fn().mockReturnValue(true),
|
||||
body: { test: 'data' }
|
||||
body: {},
|
||||
ip: '127.0.0.1'
|
||||
};
|
||||
|
||||
mockResponse = {
|
||||
status: jest.fn().mockReturnThis(),
|
||||
json: jest.fn()
|
||||
status: mock(() => mockResponse),
|
||||
json: mock(() => mockResponse),
|
||||
setHeader: mock(() => mockResponse),
|
||||
removeHeader: mock(() => mockResponse)
|
||||
};
|
||||
mockNext = jest.fn();
|
||||
|
||||
mockNext = mock(() => { });
|
||||
});
|
||||
|
||||
it('should pass valid requests', () => {
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
if (mockRequest.headers) {
|
||||
mockRequest.headers.authorization = 'Bearer valid-token';
|
||||
}
|
||||
const validateTokenSpy = mock(() => ({ valid: true }));
|
||||
TokenManager.validateToken = validateTokenSpy;
|
||||
|
||||
validateRequest(mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockNext).toHaveBeenCalled();
|
||||
expect(mockResponse.status).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should reject invalid content type', () => {
|
||||
mockRequest.is = jest.fn().mockReturnValue(false);
|
||||
if (mockRequest.headers) {
|
||||
mockRequest.headers['content-type'] = 'text/plain';
|
||||
}
|
||||
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
validateRequest(mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(415);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Unsupported Media Type - Content-Type must be application/json'
|
||||
success: false,
|
||||
message: 'Unsupported Media Type',
|
||||
error: 'Content-Type must be application/json',
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
|
||||
it('should reject missing token', () => {
|
||||
mockRequest.headers = {};
|
||||
if (mockRequest.headers) {
|
||||
delete mockRequest.headers.authorization;
|
||||
}
|
||||
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
validateRequest(mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Invalid or expired token'
|
||||
success: false,
|
||||
message: 'Unauthorized',
|
||||
error: 'Missing or invalid authorization header',
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
|
||||
it('should reject invalid request body', () => {
|
||||
mockRequest.body = null;
|
||||
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
validateRequest(mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(400);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Invalid request body'
|
||||
success: false,
|
||||
message: 'Bad Request',
|
||||
error: 'Invalid request body structure',
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input Sanitization', () => {
|
||||
let mockRequest: Partial<Request>;
|
||||
let mockResponse: Partial<Response>;
|
||||
let mockNext: jest.Mock;
|
||||
let mockRequest: any;
|
||||
let mockResponse: any;
|
||||
let mockNext: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockRequest = {
|
||||
body: {}
|
||||
};
|
||||
mockResponse = {};
|
||||
mockNext = jest.fn();
|
||||
});
|
||||
|
||||
it('should sanitize HTML tags from request body', () => {
|
||||
mockRequest.body = {
|
||||
text: 'Test <script>alert("xss")</script>',
|
||||
nested: {
|
||||
html: '<img src="x" onerror="alert(1)">'
|
||||
}
|
||||
};
|
||||
|
||||
sanitizeInput(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
|
||||
expect(mockRequest.body).toEqual({
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json'
|
||||
},
|
||||
body: {
|
||||
text: 'Test alert("xss")',
|
||||
nested: {
|
||||
html: 'img src="x" onerror="alert(1)"'
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
mockResponse = {
|
||||
status: mock(() => mockResponse),
|
||||
json: mock(() => mockResponse)
|
||||
};
|
||||
|
||||
mockNext = mock(() => { });
|
||||
});
|
||||
|
||||
it('should sanitize HTML tags from request body', () => {
|
||||
sanitizeInput(mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockRequest.body).toEqual({
|
||||
text: 'Test',
|
||||
nested: {
|
||||
html: ''
|
||||
}
|
||||
});
|
||||
expect(mockNext).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle non-object body', () => {
|
||||
mockRequest.body = 'string body';
|
||||
|
||||
sanitizeInput(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
|
||||
expect(mockRequest.body).toBe('string body');
|
||||
sanitizeInput(mockRequest, mockResponse, mockNext);
|
||||
expect(mockNext).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handler', () => {
|
||||
let mockRequest: Partial<Request>;
|
||||
let mockResponse: Partial<Response>;
|
||||
let mockNext: jest.Mock;
|
||||
const originalEnv = process.env.NODE_ENV;
|
||||
let mockRequest: any;
|
||||
let mockResponse: any;
|
||||
let mockNext: any;
|
||||
|
||||
beforeEach(() => {
|
||||
mockRequest = {};
|
||||
mockResponse = {
|
||||
status: jest.fn().mockReturnThis(),
|
||||
json: jest.fn()
|
||||
mockRequest = {
|
||||
method: 'POST',
|
||||
ip: '127.0.0.1'
|
||||
};
|
||||
mockNext = jest.fn();
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
process.env.NODE_ENV = originalEnv;
|
||||
mockResponse = {
|
||||
status: mock(() => mockResponse),
|
||||
json: mock(() => mockResponse)
|
||||
};
|
||||
|
||||
mockNext = mock(() => { });
|
||||
});
|
||||
|
||||
it('should handle errors in production mode', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
const error = new Error('Test error');
|
||||
|
||||
errorHandler(
|
||||
error,
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
errorHandler(error, mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Internal Server Error',
|
||||
message: undefined
|
||||
success: false,
|
||||
message: 'Internal Server Error',
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
|
||||
it('should include error message in development mode', () => {
|
||||
process.env.NODE_ENV = 'development';
|
||||
const error = new Error('Test error');
|
||||
|
||||
errorHandler(
|
||||
error,
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
mockNext
|
||||
);
|
||||
errorHandler(error, mockRequest, mockResponse, mockNext);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Internal Server Error',
|
||||
message: 'Test error'
|
||||
success: false,
|
||||
message: 'Internal Server Error',
|
||||
error: 'Test error',
|
||||
stack: expect.any(String),
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiter', () => {
|
||||
it('should limit requests after threshold', async () => {
|
||||
const mockContext = {
|
||||
request: new Request('http://localhost', {
|
||||
headers: new Headers({
|
||||
'x-forwarded-for': '127.0.0.1'
|
||||
})
|
||||
}),
|
||||
set: mock(() => { })
|
||||
};
|
||||
|
||||
// Test multiple requests
|
||||
for (let i = 0; i < 100; i++) {
|
||||
await rateLimiter.derive(mockContext);
|
||||
}
|
||||
|
||||
// The next request should throw
|
||||
try {
|
||||
await rateLimiter.derive(mockContext);
|
||||
expect(false).toBe(true); // Should not reach here
|
||||
} catch (error) {
|
||||
expect(error instanceof Error).toBe(true);
|
||||
expect(error.message).toBe('Too many requests from this IP, please try again later');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security Headers', () => {
|
||||
it('should set security headers', async () => {
|
||||
const mockHeaders = new Headers();
|
||||
const mockContext = {
|
||||
request: new Request('http://localhost', {
|
||||
headers: mockHeaders
|
||||
}),
|
||||
set: mock(() => { })
|
||||
};
|
||||
|
||||
await securityHeaders.derive(mockContext);
|
||||
|
||||
// Verify that security headers were set
|
||||
const headers = mockContext.request.headers;
|
||||
expect(headers.has('content-security-policy')).toBe(true);
|
||||
expect(headers.has('x-frame-options')).toBe(true);
|
||||
expect(headers.has('x-content-type-options')).toBe(true);
|
||||
expect(headers.has('referrer-policy')).toBe(true);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,177 +1,156 @@
|
||||
import { jest, describe, it, expect, beforeEach } from '@jest/globals';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { describe, it, expect } from 'bun:test';
|
||||
import {
|
||||
validateRequest,
|
||||
sanitizeInput,
|
||||
errorHandler,
|
||||
rateLimiter,
|
||||
securityHeaders
|
||||
checkRateLimit,
|
||||
validateRequestHeaders,
|
||||
sanitizeValue,
|
||||
applySecurityHeaders,
|
||||
handleError
|
||||
} from '../../src/security/index.js';
|
||||
|
||||
type MockRequest = {
|
||||
headers: {
|
||||
'content-type'?: string;
|
||||
authorization?: string;
|
||||
};
|
||||
body?: any;
|
||||
is: jest.MockInstance<string | false | null, [type: string | string[]]>;
|
||||
};
|
||||
describe('Security Middleware Utilities', () => {
|
||||
describe('Rate Limiter', () => {
|
||||
it('should allow requests under threshold', () => {
|
||||
const ip = '127.0.0.1';
|
||||
expect(() => checkRateLimit(ip, 10)).not.toThrow();
|
||||
});
|
||||
|
||||
type MockResponse = {
|
||||
status: jest.MockInstance<MockResponse, [code: number]>;
|
||||
json: jest.MockInstance<MockResponse, [body: any]>;
|
||||
setHeader: jest.MockInstance<MockResponse, [name: string, value: string]>;
|
||||
};
|
||||
it('should throw when requests exceed threshold', () => {
|
||||
const ip = '127.0.0.2';
|
||||
|
||||
describe('Security Middleware', () => {
|
||||
let mockRequest: MockRequest;
|
||||
let mockResponse: MockResponse;
|
||||
let nextFunction: jest.Mock;
|
||||
// Simulate multiple requests
|
||||
for (let i = 0; i < 11; i++) {
|
||||
if (i < 10) {
|
||||
expect(() => checkRateLimit(ip, 10)).not.toThrow();
|
||||
} else {
|
||||
expect(() => checkRateLimit(ip, 10)).toThrow('Too many requests from this IP, please try again later');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mockRequest = {
|
||||
headers: {},
|
||||
body: {},
|
||||
is: jest.fn<string | false | null, [string | string[]]>().mockReturnValue('json')
|
||||
};
|
||||
it('should reset rate limit after window expires', async () => {
|
||||
const ip = '127.0.0.3';
|
||||
|
||||
mockResponse = {
|
||||
status: jest.fn<MockResponse, [number]>().mockReturnThis(),
|
||||
json: jest.fn<MockResponse, [any]>().mockReturnThis(),
|
||||
setHeader: jest.fn<MockResponse, [string, string]>().mockReturnThis()
|
||||
};
|
||||
// Simulate multiple requests
|
||||
for (let i = 0; i < 11; i++) {
|
||||
if (i < 10) {
|
||||
expect(() => checkRateLimit(ip, 10, 50)).not.toThrow();
|
||||
}
|
||||
}
|
||||
|
||||
nextFunction = jest.fn();
|
||||
// Wait for rate limit window to expire
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
// Should be able to make requests again
|
||||
expect(() => checkRateLimit(ip, 10, 50)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Request Validation', () => {
|
||||
it('should pass valid requests', () => {
|
||||
mockRequest.headers.authorization = 'Bearer valid-token';
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
it('should validate content type', () => {
|
||||
const mockRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json'
|
||||
}
|
||||
});
|
||||
|
||||
it('should reject requests without authorization header', () => {
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(expect.objectContaining({
|
||||
error: expect.stringContaining('authorization')
|
||||
}));
|
||||
expect(() => validateRequestHeaders(mockRequest)).not.toThrow();
|
||||
});
|
||||
|
||||
it('should reject requests with invalid authorization format', () => {
|
||||
mockRequest.headers.authorization = 'invalid-format';
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(expect.objectContaining({
|
||||
error: expect.stringContaining('Bearer')
|
||||
}));
|
||||
it('should reject invalid content type', () => {
|
||||
const mockRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'text/plain'
|
||||
}
|
||||
});
|
||||
|
||||
expect(() => validateRequestHeaders(mockRequest)).toThrow('Content-Type must be application/json');
|
||||
});
|
||||
|
||||
it('should reject large request bodies', () => {
|
||||
const mockRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
'content-length': '2000000'
|
||||
}
|
||||
});
|
||||
|
||||
expect(() => validateRequestHeaders(mockRequest)).toThrow('Request body too large');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Input Sanitization', () => {
|
||||
it('should pass requests without body', () => {
|
||||
delete mockRequest.body;
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
it('should sanitize HTML tags', () => {
|
||||
const input = '<script>alert("xss")</script>Hello';
|
||||
const sanitized = sanitizeValue(input);
|
||||
expect(sanitized).toBe('<script>alert("xss")</script>Hello');
|
||||
});
|
||||
|
||||
it('should sanitize HTML in request body', () => {
|
||||
mockRequest.body = {
|
||||
it('should sanitize nested objects', () => {
|
||||
const input = {
|
||||
text: '<script>alert("xss")</script>Hello',
|
||||
nested: {
|
||||
html: '<img src="x" onerror="alert(1)">World'
|
||||
}
|
||||
};
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockRequest.body.text).toBe('Hello');
|
||||
expect(mockRequest.body.nested.html).toBe('World');
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
const sanitized = sanitizeValue(input);
|
||||
expect(sanitized).toEqual({
|
||||
text: '<script>alert("xss")</script>Hello',
|
||||
nested: {
|
||||
html: '<img src="x" onerror="alert(1)">World'
|
||||
}
|
||||
});
|
||||
|
||||
it('should handle non-object bodies', () => {
|
||||
mockRequest.body = '<p>text</p>';
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockRequest.body).toBe('text');
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should preserve non-string values', () => {
|
||||
mockRequest.body = {
|
||||
number: 42,
|
||||
const input = {
|
||||
number: 123,
|
||||
boolean: true,
|
||||
null: null,
|
||||
array: [1, 2, 3]
|
||||
};
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockRequest.body).toEqual({
|
||||
number: 42,
|
||||
boolean: true,
|
||||
null: null,
|
||||
array: [1, 2, 3]
|
||||
});
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handler', () => {
|
||||
const originalEnv = process.env.NODE_ENV;
|
||||
|
||||
afterAll(() => {
|
||||
process.env.NODE_ENV = originalEnv;
|
||||
});
|
||||
|
||||
it('should handle errors in production mode', () => {
|
||||
process.env.NODE_ENV = 'production';
|
||||
const error = new Error('Test error');
|
||||
errorHandler(error, mockRequest as Request, mockResponse as Response, nextFunction);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Internal Server Error'
|
||||
});
|
||||
});
|
||||
|
||||
it('should include error details in development mode', () => {
|
||||
process.env.NODE_ENV = 'development';
|
||||
const error = new Error('Test error');
|
||||
errorHandler(error, mockRequest as Request, mockResponse as Response, nextFunction);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
error: 'Test error',
|
||||
stack: expect.any(String)
|
||||
});
|
||||
});
|
||||
|
||||
it('should handle non-Error objects', () => {
|
||||
const error = 'String error message';
|
||||
|
||||
errorHandler(
|
||||
error as any,
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction
|
||||
);
|
||||
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Rate Limiter', () => {
|
||||
it('should be configured with correct options', () => {
|
||||
expect(rateLimiter).toBeDefined();
|
||||
const middleware = rateLimiter as any;
|
||||
expect(middleware.windowMs).toBeDefined();
|
||||
expect(middleware.max).toBeDefined();
|
||||
const sanitized = sanitizeValue(input);
|
||||
expect(sanitized).toEqual(input);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Security Headers', () => {
|
||||
it('should set appropriate security headers', () => {
|
||||
securityHeaders(mockRequest as Request, mockResponse as Response, nextFunction);
|
||||
expect(mockResponse.setHeader).toHaveBeenCalledWith('X-Content-Type-Options', 'nosniff');
|
||||
expect(mockResponse.setHeader).toHaveBeenCalledWith('X-Frame-Options', 'DENY');
|
||||
expect(mockResponse.setHeader).toHaveBeenCalledWith('X-XSS-Protection', '1; mode=block');
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
it('should apply security headers', () => {
|
||||
const mockRequest = new Request('http://localhost');
|
||||
const headers = applySecurityHeaders(mockRequest);
|
||||
|
||||
expect(headers).toBeDefined();
|
||||
expect(headers['content-security-policy']).toBeDefined();
|
||||
expect(headers['x-frame-options']).toBeDefined();
|
||||
expect(headers['x-content-type-options']).toBeDefined();
|
||||
expect(headers['referrer-policy']).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should handle errors in production mode', () => {
|
||||
const error = new Error('Test error');
|
||||
const result = handleError(error, 'production');
|
||||
|
||||
expect(result).toEqual({
|
||||
error: true,
|
||||
message: 'Internal server error',
|
||||
timestamp: expect.any(String)
|
||||
});
|
||||
});
|
||||
|
||||
it('should include error details in development mode', () => {
|
||||
const error = new Error('Test error');
|
||||
const result = handleError(error, 'development');
|
||||
|
||||
expect(result).toEqual({
|
||||
error: true,
|
||||
message: 'Internal server error',
|
||||
timestamp: expect.any(String),
|
||||
error: 'Test error',
|
||||
stack: expect.any(String)
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,17 @@
|
||||
import { TokenManager } from '../../src/security/index.js';
|
||||
import jwt from 'jsonwebtoken';
|
||||
|
||||
const TEST_SECRET = 'test-secret-that-is-long-enough-for-testing-purposes';
|
||||
|
||||
describe('TokenManager', () => {
|
||||
beforeAll(() => {
|
||||
process.env.JWT_SECRET = TEST_SECRET;
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
delete process.env.JWT_SECRET;
|
||||
});
|
||||
|
||||
const encryptionKey = 'test-encryption-key-32-chars-long!!';
|
||||
const validToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiZXhwIjoxNjE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c';
|
||||
|
||||
@@ -35,27 +46,51 @@ describe('TokenManager', () => {
|
||||
|
||||
describe('Token Validation', () => {
|
||||
it('should validate correct tokens', () => {
|
||||
const validJwt = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiZXhwIjoxNjcyNTI3OTk5fQ.Q6cm_sZS6uqfGqO3LQ-0VqNXhqXR6mFh6IP7s0NPnSQ';
|
||||
expect(TokenManager.validateToken(validJwt)).toBe(true);
|
||||
const payload = { sub: '123', name: 'Test User', iat: Math.floor(Date.now() / 1000), exp: Math.floor(Date.now() / 1000) + 3600 };
|
||||
const token = jwt.sign(payload, TEST_SECRET);
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.error).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should reject expired tokens', () => {
|
||||
const expiredToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiZXhwIjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c';
|
||||
expect(TokenManager.validateToken(expiredToken)).toBe(false);
|
||||
const payload = { sub: '123', name: 'Test User', iat: Math.floor(Date.now() / 1000) - 7200, exp: Math.floor(Date.now() / 1000) - 3600 };
|
||||
const token = jwt.sign(payload, TEST_SECRET);
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Token has expired');
|
||||
});
|
||||
|
||||
it('should reject malformed tokens', () => {
|
||||
expect(TokenManager.validateToken('invalid-token')).toBe(false);
|
||||
const result = TokenManager.validateToken('invalid-token');
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Token length below minimum requirement');
|
||||
});
|
||||
|
||||
it('should reject tokens with invalid signature', () => {
|
||||
const tamperedToken = validToken.slice(0, -5) + 'xxxxx';
|
||||
expect(TokenManager.validateToken(tamperedToken)).toBe(false);
|
||||
const payload = { sub: '123', name: 'Test User', iat: Math.floor(Date.now() / 1000), exp: Math.floor(Date.now() / 1000) + 3600 };
|
||||
const token = jwt.sign(payload, 'different-secret');
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Invalid token signature');
|
||||
});
|
||||
|
||||
it('should handle tokens with missing expiration', () => {
|
||||
const noExpToken = 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIn0.Q6cm_sZS6uqfGqO3LQ-0VqNXhqXR6mFh6IP7s0NPnSQ';
|
||||
expect(TokenManager.validateToken(noExpToken)).toBe(false);
|
||||
const payload = { sub: '123', name: 'Test User' };
|
||||
const token = jwt.sign(payload, TEST_SECRET);
|
||||
const result = TokenManager.validateToken(token);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe('Token missing required claims');
|
||||
});
|
||||
|
||||
it('should handle undefined and null inputs', () => {
|
||||
const undefinedResult = TokenManager.validateToken(undefined);
|
||||
expect(undefinedResult.valid).toBe(false);
|
||||
expect(undefinedResult.error).toBe('Invalid token format');
|
||||
|
||||
const nullResult = TokenManager.validateToken(null);
|
||||
expect(nullResult.valid).toBe(false);
|
||||
expect(nullResult.error).toBe('Invalid token format');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -103,10 +138,5 @@ describe('TokenManager', () => {
|
||||
it('should handle invalid base64 input', () => {
|
||||
expect(() => TokenManager.decryptToken('not-base64!@#$%^', encryptionKey)).toThrow();
|
||||
});
|
||||
|
||||
it('should handle undefined and null inputs', () => {
|
||||
expect(TokenManager.validateToken(undefined as any)).toBe(false);
|
||||
expect(TokenManager.validateToken(null as any)).toBe(false);
|
||||
});
|
||||
});
|
||||
});
|
||||
570
bun.lock
Executable file
570
bun.lock
Executable file
@@ -0,0 +1,570 @@
|
||||
{
|
||||
"lockfileVersion": 0,
|
||||
"workspaces": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"@elysiajs/cors": "^1.2.0",
|
||||
"@elysiajs/swagger": "^1.2.0",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/sanitize-html": "^2.9.5",
|
||||
"@types/ws": "^8.5.10",
|
||||
"dotenv": "^16.4.5",
|
||||
"elysia": "^1.2.11",
|
||||
"helmet": "^7.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"node-fetch": "^3.3.2",
|
||||
"sanitize-html": "^2.11.0",
|
||||
"typescript": "^5.3.3",
|
||||
"winston": "^3.11.0",
|
||||
"winston-daily-rotate-file": "^5.0.0",
|
||||
"ws": "^8.16.0",
|
||||
"zod": "^3.22.4",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^7.1.0",
|
||||
"@typescript-eslint/parser": "^7.1.0",
|
||||
"bun-types": "^1.2.2",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"husky": "^9.0.11",
|
||||
"prettier": "^3.2.5",
|
||||
"supertest": "^6.3.3",
|
||||
"uuid": "^11.0.5",
|
||||
},
|
||||
},
|
||||
},
|
||||
"packages": {
|
||||
"@colors/colors": ["@colors/colors@1.6.0", "", {}, "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA=="],
|
||||
|
||||
"@dabh/diagnostics": ["@dabh/diagnostics@2.0.3", "", { "dependencies": { "colorspace": "1.1.x", "enabled": "2.0.x", "kuler": "^2.0.0" } }, "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA=="],
|
||||
|
||||
"@elysiajs/cors": ["@elysiajs/cors@1.2.0", "", { "peerDependencies": { "elysia": ">= 1.2.0" } }, "sha512-qsJwDAg6WfdQRMfj6uSMcDPSpXvm/zQFeAX1uuJXhIgazH8itSfcDxcH9pMuXVRX1yQNi2pPwNQLJmAcw5mzvw=="],
|
||||
|
||||
"@elysiajs/swagger": ["@elysiajs/swagger@1.2.0", "", { "dependencies": { "@scalar/themes": "^0.9.52", "@scalar/types": "^0.0.12", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.2.0" } }, "sha512-OPx93DP6rM2VHjA3D44Xiz5MYm9AYlO2NGWPsnSsdyvaOCiL9wJj529583h7arX4iIEYE5LiLB0/A45unqbopw=="],
|
||||
|
||||
"@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA=="],
|
||||
|
||||
"@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="],
|
||||
|
||||
"@eslint/eslintrc": ["@eslint/eslintrc@2.1.4", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ=="],
|
||||
|
||||
"@eslint/js": ["@eslint/js@8.57.1", "", {}, "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q=="],
|
||||
|
||||
"@humanwhocodes/config-array": ["@humanwhocodes/config-array@0.13.0", "", { "dependencies": { "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" } }, "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw=="],
|
||||
|
||||
"@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="],
|
||||
|
||||
"@humanwhocodes/object-schema": ["@humanwhocodes/object-schema@2.0.3", "", {}, "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA=="],
|
||||
|
||||
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
|
||||
|
||||
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
|
||||
|
||||
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
|
||||
|
||||
"@pkgr/core": ["@pkgr/core@0.1.1", "", {}, "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA=="],
|
||||
|
||||
"@scalar/openapi-types": ["@scalar/openapi-types@0.1.1", "", {}, "sha512-NMy3QNk6ytcCoPUGJH0t4NNr36OWXgZhA3ormr3TvhX1NDgoF95wFyodGVH8xiHeUyn2/FxtETm8UBLbB5xEmg=="],
|
||||
|
||||
"@scalar/themes": ["@scalar/themes@0.9.64", "", { "dependencies": { "@scalar/types": "0.0.30" } }, "sha512-hr9bCTdH9M/N8w31Td+IJVtbH+v0Ej31myW8QWhUfwYZe5qS815Tl1mp+qWFaObstOw5VX3zOtiZuuhF1zMIyw=="],
|
||||
|
||||
"@scalar/types": ["@scalar/types@0.0.12", "", { "dependencies": { "@scalar/openapi-types": "0.1.1", "@unhead/schema": "^1.9.5" } }, "sha512-XYZ36lSEx87i4gDqopQlGCOkdIITHHEvgkuJFrXFATQs9zHARop0PN0g4RZYWj+ZpCUclOcaOjbCt8JGe22mnQ=="],
|
||||
|
||||
"@sinclair/typebox": ["@sinclair/typebox@0.34.15", "", {}, "sha512-xeIzl3h1Znn9w/LTITqpiwag0gXjA+ldi2ZkXIBxGEppGCW211Tza+eL6D4pKqs10bj5z2umBWk5WL6spQ2OCQ=="],
|
||||
|
||||
"@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.8", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-7fx54m60nLFUVYlxAB1xpe9CBWX2vSrk50Y6ogRJ1v5xxtba7qXTg5BgYDN5dq+yuQQ9HaVlHJyAAt1/mxryFg=="],
|
||||
|
||||
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||
|
||||
"@types/node": ["@types/node@20.17.17", "", { "dependencies": { "undici-types": "~6.19.2" } }, "sha512-/WndGO4kIfMicEQLTi/mDANUu/iVUhT7KboZPdEqqHQ4aTS+3qT3U5gIqWDFV+XouorjfgGqvKILJeHhuQgFYg=="],
|
||||
|
||||
"@types/sanitize-html": ["@types/sanitize-html@2.13.0", "", { "dependencies": { "htmlparser2": "^8.0.0" } }, "sha512-X31WxbvW9TjIhZZNyNBZ/p5ax4ti7qsNDBDEnH4zAgmEh35YnFD1UiS6z9Cd34kKm0LslFW0KPmTQzu/oGtsqQ=="],
|
||||
|
||||
"@types/triple-beam": ["@types/triple-beam@1.3.5", "", {}, "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw=="],
|
||||
|
||||
"@types/uuid": ["@types/uuid@10.0.0", "", {}, "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ=="],
|
||||
|
||||
"@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="],
|
||||
|
||||
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@7.18.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/type-utils": "7.18.0", "@typescript-eslint/utils": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "@typescript-eslint/parser": "^7.0.0", "eslint": "^8.56.0" } }, "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw=="],
|
||||
|
||||
"@typescript-eslint/parser": ["@typescript-eslint/parser@7.18.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", "@typescript-eslint/typescript-estree": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg=="],
|
||||
|
||||
"@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0" } }, "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA=="],
|
||||
|
||||
"@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@7.18.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "7.18.0", "@typescript-eslint/utils": "7.18.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA=="],
|
||||
|
||||
"@typescript-eslint/types": ["@typescript-eslint/types@7.18.0", "", {}, "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^1.3.0" } }, "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA=="],
|
||||
|
||||
"@typescript-eslint/utils": ["@typescript-eslint/utils@7.18.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", "@typescript-eslint/typescript-estree": "7.18.0" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw=="],
|
||||
|
||||
"@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "eslint-visitor-keys": "^3.4.3" } }, "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg=="],
|
||||
|
||||
"@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
|
||||
|
||||
"@unhead/schema": ["@unhead/schema@1.11.18", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-a3TA/OJCRdfbFhcA3Hq24k1ZU1o9szicESrw8DZcGyQFacHnh84mVgnyqSkMnwgCmfN4kvjSiTBlLEHS6+wATw=="],
|
||||
|
||||
"acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
|
||||
|
||||
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
|
||||
|
||||
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
|
||||
|
||||
"ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
|
||||
|
||||
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
|
||||
|
||||
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||
|
||||
"array-union": ["array-union@2.1.0", "", {}, "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="],
|
||||
|
||||
"asap": ["asap@2.0.6", "", {}, "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="],
|
||||
|
||||
"async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="],
|
||||
|
||||
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
|
||||
|
||||
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
|
||||
|
||||
"brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="],
|
||||
|
||||
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
|
||||
|
||||
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
|
||||
|
||||
"bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="],
|
||||
|
||||
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="],
|
||||
|
||||
"call-bound": ["call-bound@1.0.3", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "get-intrinsic": "^1.2.6" } }, "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA=="],
|
||||
|
||||
"callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="],
|
||||
|
||||
"chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
|
||||
|
||||
"color": ["color@3.2.1", "", { "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" } }, "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA=="],
|
||||
|
||||
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||
|
||||
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
|
||||
|
||||
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
|
||||
|
||||
"colorspace": ["colorspace@1.1.4", "", { "dependencies": { "color": "^3.1.3", "text-hex": "1.0.x" } }, "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w=="],
|
||||
|
||||
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
|
||||
|
||||
"component-emitter": ["component-emitter@1.3.1", "", {}, "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ=="],
|
||||
|
||||
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
|
||||
|
||||
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||
|
||||
"cookiejar": ["cookiejar@2.1.4", "", {}, "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="],
|
||||
|
||||
"cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
|
||||
|
||||
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||
|
||||
"debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="],
|
||||
|
||||
"deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="],
|
||||
|
||||
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
|
||||
|
||||
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||
|
||||
"dezalgo": ["dezalgo@1.0.4", "", { "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig=="],
|
||||
|
||||
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||
|
||||
"doctrine": ["doctrine@3.0.0", "", { "dependencies": { "esutils": "^2.0.2" } }, "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w=="],
|
||||
|
||||
"dom-serializer": ["dom-serializer@2.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg=="],
|
||||
|
||||
"domelementtype": ["domelementtype@2.3.0", "", {}, "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="],
|
||||
|
||||
"domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="],
|
||||
|
||||
"domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="],
|
||||
|
||||
"dotenv": ["dotenv@16.4.7", "", {}, "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ=="],
|
||||
|
||||
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
|
||||
|
||||
"ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],
|
||||
|
||||
"elysia": ["elysia@1.2.12", "", { "dependencies": { "@sinclair/typebox": "^0.34.15", "cookie": "^1.0.2", "memoirist": "^0.3.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-X1bZo09qe8/Poa/5tz08Y+sE/77B/wLwnA5xDDENU3FCrsUtYJuBVcy6BPXGRCgnJ1fPQpc0Ov2ZU5MYJXluTg=="],
|
||||
|
||||
"enabled": ["enabled@2.0.0", "", {}, "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ=="],
|
||||
|
||||
"entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
|
||||
|
||||
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
|
||||
|
||||
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
|
||||
|
||||
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
|
||||
|
||||
"escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="],
|
||||
|
||||
"eslint": ["eslint@8.57.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", "@eslint/js": "8.57.1", "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", "eslint-visitor-keys": "^3.4.3", "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA=="],
|
||||
|
||||
"eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="],
|
||||
|
||||
"eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.3", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.9.1" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw=="],
|
||||
|
||||
"eslint-scope": ["eslint-scope@7.2.2", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg=="],
|
||||
|
||||
"eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
|
||||
|
||||
"espree": ["espree@9.6.1", "", { "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" } }, "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ=="],
|
||||
|
||||
"esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="],
|
||||
|
||||
"esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="],
|
||||
|
||||
"estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
|
||||
|
||||
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="],
|
||||
|
||||
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||
|
||||
"fast-diff": ["fast-diff@1.3.0", "", {}, "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw=="],
|
||||
|
||||
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
|
||||
|
||||
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
|
||||
|
||||
"fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="],
|
||||
|
||||
"fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="],
|
||||
|
||||
"fastq": ["fastq@1.19.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-7SFSRCNjBQIZH/xZR3iy5iQYR8aGBE0h3VG6/cwlbrpdciNYBMotQav8c1XI3HjHH+NikUpP53nPdlZSdWmFzA=="],
|
||||
|
||||
"fecha": ["fecha@4.2.3", "", {}, "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw=="],
|
||||
|
||||
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
|
||||
|
||||
"file-entry-cache": ["file-entry-cache@6.0.1", "", { "dependencies": { "flat-cache": "^3.0.4" } }, "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg=="],
|
||||
|
||||
"file-stream-rotator": ["file-stream-rotator@0.6.1", "", { "dependencies": { "moment": "^2.29.1" } }, "sha512-u+dBid4PvZw17PmDeRcNOtCP9CCK/9lRN2w+r1xIS7yOL9JFrIBKTvrYsxT4P0pGtThYTn++QS5ChHaUov3+zQ=="],
|
||||
|
||||
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
|
||||
|
||||
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
|
||||
|
||||
"flat-cache": ["flat-cache@3.2.0", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", "rimraf": "^3.0.2" } }, "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw=="],
|
||||
|
||||
"flatted": ["flatted@3.3.2", "", {}, "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA=="],
|
||||
|
||||
"fn.name": ["fn.name@1.1.0", "", {}, "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="],
|
||||
|
||||
"form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
|
||||
|
||||
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
|
||||
|
||||
"formidable": ["formidable@2.1.2", "", { "dependencies": { "dezalgo": "^1.0.4", "hexoid": "^1.0.0", "once": "^1.4.0", "qs": "^6.11.0" } }, "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g=="],
|
||||
|
||||
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
|
||||
|
||||
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||
|
||||
"get-intrinsic": ["get-intrinsic@1.2.7", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "get-proto": "^1.0.0", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA=="],
|
||||
|
||||
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
|
||||
|
||||
"glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="],
|
||||
|
||||
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
|
||||
|
||||
"globals": ["globals@13.24.0", "", { "dependencies": { "type-fest": "^0.20.2" } }, "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ=="],
|
||||
|
||||
"globby": ["globby@11.1.0", "", { "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" } }, "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g=="],
|
||||
|
||||
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
|
||||
|
||||
"graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="],
|
||||
|
||||
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
|
||||
|
||||
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
|
||||
|
||||
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
|
||||
|
||||
"helmet": ["helmet@7.2.0", "", {}, "sha512-ZRiwvN089JfMXokizgqEPXsl2Guk094yExfoDXR0cBYWxtBbaSww/w+vT4WEJsBW2iTUi1GgZ6swmoug3Oy4Xw=="],
|
||||
|
||||
"hexoid": ["hexoid@1.0.0", "", {}, "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g=="],
|
||||
|
||||
"hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="],
|
||||
|
||||
"htmlparser2": ["htmlparser2@8.0.2", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "entities": "^4.4.0" } }, "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA=="],
|
||||
|
||||
"husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="],
|
||||
|
||||
"ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
|
||||
|
||||
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],
|
||||
|
||||
"imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="],
|
||||
|
||||
"inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="],
|
||||
|
||||
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
|
||||
|
||||
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||
|
||||
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||
|
||||
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
|
||||
|
||||
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
|
||||
|
||||
"is-path-inside": ["is-path-inside@3.0.3", "", {}, "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="],
|
||||
|
||||
"is-plain-object": ["is-plain-object@5.0.0", "", {}, "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="],
|
||||
|
||||
"is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
|
||||
|
||||
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
|
||||
|
||||
"js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
|
||||
|
||||
"json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="],
|
||||
|
||||
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
|
||||
|
||||
"json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="],
|
||||
|
||||
"jsonwebtoken": ["jsonwebtoken@9.0.2", "", { "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ=="],
|
||||
|
||||
"jwa": ["jwa@1.4.1", "", { "dependencies": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA=="],
|
||||
|
||||
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||
|
||||
"keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="],
|
||||
|
||||
"kuler": ["kuler@2.0.0", "", {}, "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A=="],
|
||||
|
||||
"levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="],
|
||||
|
||||
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="],
|
||||
|
||||
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
|
||||
|
||||
"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
|
||||
|
||||
"lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],
|
||||
|
||||
"lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],
|
||||
|
||||
"lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],
|
||||
|
||||
"lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],
|
||||
|
||||
"lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="],
|
||||
|
||||
"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
|
||||
|
||||
"logform": ["logform@2.7.0", "", { "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", "fecha": "^4.2.0", "ms": "^2.1.1", "safe-stable-stringify": "^2.3.1", "triple-beam": "^1.3.0" } }, "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ=="],
|
||||
|
||||
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
|
||||
|
||||
"memoirist": ["memoirist@0.3.0", "", {}, "sha512-wR+4chMgVPq+T6OOsk40u9Wlpw1Pjx66NMNiYxCQQ4EUJ7jDs3D9kTCeKdBOkvAiqXlHLVJlvYL01PvIJ1MPNg=="],
|
||||
|
||||
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
|
||||
|
||||
"methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="],
|
||||
|
||||
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
|
||||
|
||||
"mime": ["mime@2.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg=="],
|
||||
|
||||
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||
|
||||
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||
|
||||
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
|
||||
|
||||
"moment": ["moment@2.30.1", "", {}, "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how=="],
|
||||
|
||||
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||
|
||||
"nanoid": ["nanoid@3.3.8", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w=="],
|
||||
|
||||
"natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="],
|
||||
|
||||
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
|
||||
|
||||
"node-fetch": ["node-fetch@3.3.2", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="],
|
||||
|
||||
"object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
|
||||
|
||||
"object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="],
|
||||
|
||||
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||
|
||||
"one-time": ["one-time@1.0.0", "", { "dependencies": { "fn.name": "1.x.x" } }, "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g=="],
|
||||
|
||||
"openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
|
||||
|
||||
"optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="],
|
||||
|
||||
"p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
|
||||
|
||||
"p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="],
|
||||
|
||||
"parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="],
|
||||
|
||||
"parse-srcset": ["parse-srcset@1.0.2", "", {}, "sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q=="],
|
||||
|
||||
"path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="],
|
||||
|
||||
"path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="],
|
||||
|
||||
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||
|
||||
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
|
||||
|
||||
"pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
|
||||
|
||||
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||
|
||||
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
|
||||
|
||||
"postcss": ["postcss@8.5.1", "", { "dependencies": { "nanoid": "^3.3.8", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ=="],
|
||||
|
||||
"prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="],
|
||||
|
||||
"prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="],
|
||||
|
||||
"prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="],
|
||||
|
||||
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||
|
||||
"qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="],
|
||||
|
||||
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||
|
||||
"readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
|
||||
|
||||
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="],
|
||||
|
||||
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
|
||||
|
||||
"rimraf": ["rimraf@3.0.2", "", { "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" } }, "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA=="],
|
||||
|
||||
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||
|
||||
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||
|
||||
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||
|
||||
"sanitize-html": ["sanitize-html@2.14.0", "", { "dependencies": { "deepmerge": "^4.2.2", "escape-string-regexp": "^4.0.0", "htmlparser2": "^8.0.0", "is-plain-object": "^5.0.0", "parse-srcset": "^1.0.2", "postcss": "^8.3.11" } }, "sha512-CafX+IUPxZshXqqRaG9ZClSlfPVjSxI0td7n07hk8QO2oO+9JDnlcL8iM8TWeOXOIBFgIOx6zioTzM53AOMn3g=="],
|
||||
|
||||
"semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="],
|
||||
|
||||
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
|
||||
|
||||
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
|
||||
|
||||
"side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="],
|
||||
|
||||
"side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="],
|
||||
|
||||
"side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="],
|
||||
|
||||
"side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="],
|
||||
|
||||
"simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
|
||||
|
||||
"slash": ["slash@3.0.0", "", {}, "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="],
|
||||
|
||||
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||
|
||||
"stack-trace": ["stack-trace@0.0.10", "", {}, "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg=="],
|
||||
|
||||
"string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
|
||||
|
||||
"strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
|
||||
|
||||
"strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="],
|
||||
|
||||
"superagent": ["superagent@8.1.2", "", { "dependencies": { "component-emitter": "^1.3.0", "cookiejar": "^2.1.4", "debug": "^4.3.4", "fast-safe-stringify": "^2.1.1", "form-data": "^4.0.0", "formidable": "^2.1.2", "methods": "^1.1.2", "mime": "2.6.0", "qs": "^6.11.0", "semver": "^7.3.8" } }, "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA=="],
|
||||
|
||||
"supertest": ["supertest@6.3.4", "", { "dependencies": { "methods": "^1.1.2", "superagent": "^8.1.2" } }, "sha512-erY3HFDG0dPnhw4U+udPfrzXa4xhSG+n4rxfRuZWCUvjFWwKl+OxWf/7zk50s84/fAAs7vf5QAb9uRa0cCykxw=="],
|
||||
|
||||
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
|
||||
|
||||
"synckit": ["synckit@0.9.2", "", { "dependencies": { "@pkgr/core": "^0.1.0", "tslib": "^2.6.2" } }, "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw=="],
|
||||
|
||||
"text-hex": ["text-hex@1.0.0", "", {}, "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg=="],
|
||||
|
||||
"text-table": ["text-table@0.2.0", "", {}, "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="],
|
||||
|
||||
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||
|
||||
"triple-beam": ["triple-beam@1.4.1", "", {}, "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg=="],
|
||||
|
||||
"ts-api-utils": ["ts-api-utils@1.4.3", "", { "peerDependencies": { "typescript": ">=4.2.0" } }, "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw=="],
|
||||
|
||||
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||
|
||||
"type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="],
|
||||
|
||||
"type-fest": ["type-fest@0.20.2", "", {}, "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ=="],
|
||||
|
||||
"typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="],
|
||||
|
||||
"undici-types": ["undici-types@6.19.8", "", {}, "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="],
|
||||
|
||||
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||
|
||||
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
|
||||
|
||||
"uuid": ["uuid@11.0.5", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA=="],
|
||||
|
||||
"web-streams-polyfill": ["web-streams-polyfill@3.3.3", "", {}, "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="],
|
||||
|
||||
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
"winston": ["winston@3.17.0", "", { "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", "logform": "^2.7.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", "stack-trace": "0.0.x", "triple-beam": "^1.3.0", "winston-transport": "^4.9.0" } }, "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw=="],
|
||||
|
||||
"winston-daily-rotate-file": ["winston-daily-rotate-file@5.0.0", "", { "dependencies": { "file-stream-rotator": "^0.6.1", "object-hash": "^3.0.0", "triple-beam": "^1.4.1", "winston-transport": "^4.7.0" }, "peerDependencies": { "winston": "^3" } }, "sha512-JDjiXXkM5qvwY06733vf09I2wnMXpZEhxEVOSPenZMii+g7pcDcTBt2MRugnoi8BwVSuCT2jfRXBUy+n1Zz/Yw=="],
|
||||
|
||||
"winston-transport": ["winston-transport@4.9.0", "", { "dependencies": { "logform": "^2.7.0", "readable-stream": "^3.6.2", "triple-beam": "^1.3.0" } }, "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A=="],
|
||||
|
||||
"word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="],
|
||||
|
||||
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||
|
||||
"ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="],
|
||||
|
||||
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
|
||||
|
||||
"zhead": ["zhead@2.2.4", "", {}, "sha512-8F0OI5dpWIA5IGG5NHUg9staDwz/ZPxZtvGVf01j7vHqSyZ0raHY+78atOVxRqb73AotX22uV1pXt3gYSstGag=="],
|
||||
|
||||
"zod": ["zod@3.24.1", "", {}, "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A=="],
|
||||
|
||||
"@scalar/themes/@scalar/types": ["@scalar/types@0.0.30", "", { "dependencies": { "@scalar/openapi-types": "0.1.7", "@unhead/schema": "^1.11.11" } }, "sha512-rhgwovQb5f7PXuUB5bLUElpo90fdsiwcOgBXVWZ6n6dnFSKovNJ7GPXQimsZioMzTF6TdwfP94UpZVdZAK4aTw=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
|
||||
|
||||
"color/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||
|
||||
"color-string/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
|
||||
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
|
||||
|
||||
"@scalar/themes/@scalar/types/@scalar/openapi-types": ["@scalar/openapi-types@0.1.7", "", {}, "sha512-oOTG3JQifg55U3DhKB7WdNIxFnJzbPJe7rqdyWdio977l8IkxQTVmObftJhdNIMvhV2K+1f/bDoMQGu6yTaD0A=="],
|
||||
|
||||
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],
|
||||
|
||||
"color/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||
}
|
||||
}
|
||||
50
bunfig.toml
Normal file
50
bunfig.toml
Normal file
@@ -0,0 +1,50 @@
|
||||
[test]
|
||||
preload = ["./src/__tests__/setup.ts"]
|
||||
coverage = true
|
||||
# TOML inline tables must fit on a single line; a brace block spanning
# multiple lines is a syntax error, so keep the thresholds on one line.
coverageThreshold = { statements = 80, branches = 70, functions = 80, lines = 80 }
|
||||
timeout = 30000
|
||||
testMatch = ["**/__tests__/**/*.test.ts"]
|
||||
testPathIgnorePatterns = ["/node_modules/", "/dist/"]
|
||||
collectCoverageFrom = [
|
||||
"src/**/*.{ts,tsx}",
|
||||
"!src/**/*.d.ts",
|
||||
"!src/**/*.test.ts",
|
||||
"!src/types/**/*",
|
||||
"!src/mocks/**/*"
|
||||
]
|
||||
|
||||
[build]
|
||||
target = "node"
|
||||
outdir = "./dist"
|
||||
minify = true
|
||||
sourcemap = "external"
|
||||
|
||||
[install]
|
||||
production = false
|
||||
frozen = true
|
||||
peer = false
|
||||
|
||||
[install.cache]
|
||||
dir = ".bun"
|
||||
disable = false
|
||||
|
||||
[debug]
|
||||
port = 9229
|
||||
|
||||
[env]
|
||||
# Environment-specific configurations
|
||||
development.LOG_LEVEL = "debug"
|
||||
production.LOG_LEVEL = "warn"
|
||||
|
||||
[hot]
|
||||
restart = true
|
||||
reload = true
|
||||
|
||||
[performance]
|
||||
gc = true
|
||||
optimize = true
|
||||
64
docker-build.sh
Executable file
64
docker-build.sh
Executable file
@@ -0,0 +1,64 @@
|
||||
#!/bin/bash

# Build the homeassistant-mcp Docker image with resource limits, BuildKit,
# and a hard 15-minute timeout, then report the resulting image size.

# Enable error handling
set -euo pipefail

# Function to clean up stale build cache and dangling images on script exit
cleanup() {
    echo "Cleaning up..."
    docker builder prune -f --filter until=24h
    docker image prune -f
}
trap cleanup EXIT

# Clean up Docker system before building
echo "Cleaning up Docker system..."
docker system prune -f --volumes

# Set build arguments for better performance
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
export BUILDKIT_PROGRESS=plain

# Calculate available memory and CPU
TOTAL_MEM=$(free -m | awk '/^Mem:/{print $2}')
BUILD_MEM=$(( TOTAL_MEM / 2 ))      # Use half of available memory
CPU_COUNT=$(nproc)
CPU_QUOTA=$(( CPU_COUNT * 50000 )) # Allow 50% CPU usage per core

echo "Building with ${BUILD_MEM}MB memory limit and CPU quota ${CPU_QUOTA}"

# Remove any existing lockfile so the build regenerates it
rm -f bun.lockb

# Build with resource limits, optimizations, and timeout.
# NOTE: with `set -e`, a failing `docker build` would abort the script
# before the exit-code check below ever ran, so capture the status with
# `|| BUILD_EXIT_CODE=$?`. The `timeout 900` wrapper is what actually
# produces exit code 124 after 15 minutes.
echo "Building Docker image..."
BUILD_EXIT_CODE=0
timeout 900 docker build \
    --memory="${BUILD_MEM}m" \
    --memory-swap="${BUILD_MEM}m" \
    --cpu-quota="${CPU_QUOTA}" \
    --build-arg BUILDKIT_INLINE_CACHE=1 \
    --build-arg DOCKER_BUILDKIT=1 \
    --build-arg NODE_ENV=production \
    --progress=plain \
    --no-cache \
    --compress \
    -t homeassistant-mcp:latest \
    -t homeassistant-mcp:"$(date +%Y%m%d)" \
    . || BUILD_EXIT_CODE=$?

# Check if build was successful (timeout(1) exits with 124 on expiry)
if [ $BUILD_EXIT_CODE -eq 124 ]; then
    echo "Build timed out after 15 minutes!"
    exit 1
elif [ $BUILD_EXIT_CODE -ne 0 ]; then
    echo "Build failed with exit code ${BUILD_EXIT_CODE}!"
    exit 1
else
    echo "Build completed successfully!"

    # Show image size and layers
    docker image ls homeassistant-mcp:latest --format "Image size: {{.Size}}"
    echo "Layer count: $(docker history homeassistant-mcp:latest | wc -l)"
fi
|
||||
68
docker/speech/Dockerfile
Normal file
68
docker/speech/Dockerfile
Normal file
@@ -0,0 +1,68 @@
|
||||
# Use Python slim image as builder
# (builder stage: compilers and headers needed to build pyaudio wheels)
FROM python:3.10-slim as builder

# Install build dependencies (portaudio19-dev provides the headers pyaudio compiles against)
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    portaudio19-dev \
    && rm -rf /var/lib/apt/lists/*

# Create and activate virtual environment
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Install Python dependencies with specific versions and CPU-only variants
# (torch/torchaudio come from the CPU wheel index to keep the image small)
RUN pip install --no-cache-dir "numpy>=1.24.3,<2.0.0" && \
    pip install --no-cache-dir torch==2.1.2 torchaudio==2.1.2 --index-url https://download.pytorch.org/whl/cpu && \
    pip install --no-cache-dir faster-whisper==0.10.0 openwakeword==0.4.0 pyaudio==0.2.14 sounddevice==0.4.6 requests==2.31.0 && \
    pip freeze > /opt/venv/requirements.txt

# Create final image (runtime stage: no compilers, only audio runtime libraries)
FROM python:3.10-slim

# Copy virtual environment from builder
COPY --from=builder /opt/venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Install audio dependencies
RUN apt-get update && apt-get install -y \
    portaudio19-dev \
    python3-pyaudio \
    alsa-utils \
    libasound2 \
    libasound2-plugins \
    pulseaudio \
    && rm -rf /var/lib/apt/lists/*

# Create necessary directories (model cache and audio scratch space)
RUN mkdir -p /models/wake_word /audio

# Set working directory
WORKDIR /app

# Copy the wake word detection script
COPY wake_word_detector.py .

# Set environment variables (model locations read by wake_word_detector.py)
ENV WHISPER_MODEL_PATH=/models \
    WAKEWORD_MODEL_PATH=/models/wake_word \
    PYTHONUNBUFFERED=1 \
    ASR_MODEL=base.en \
    ASR_MODEL_PATH=/models

# Add resource limits to Python
ENV PYTHONMALLOC=malloc \
    MALLOC_TRIM_THRESHOLD_=100000 \
    PYTHONDEVMODE=1

# Add healthcheck: passes as long as any python process is alive
# (the bracket trick '[p]ython' keeps grep from matching itself)
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD ps aux | grep '[p]ython' || exit 1

# Copy audio setup script
COPY setup-audio.sh /setup-audio.sh
RUN chmod +x /setup-audio.sh

# Start command: configure audio levels first, then run the detector unbuffered
CMD ["/bin/bash", "-c", "/setup-audio.sh && python -u wake_word_detector.py"]
|
||||
16
docker/speech/setup-audio.sh
Executable file
16
docker/speech/setup-audio.sh
Executable file
@@ -0,0 +1,16 @@
|
||||
#!/bin/bash

# Configure PulseAudio levels for the speech container: mute the output
# monitor so the microphone does not re-capture our own playback, then set
# conservative capture/playback volumes.
#
# NOTE(review): the ALSA device names below are hard-coded for one specific
# sound card (pci-0000_00_1b.0) — confirm they match the deployment host,
# e.g. via `pactl list short sources`.

# Wait for PulseAudio to be ready
sleep 2

# Mute the monitor to prevent feedback
pactl set-source-mute alsa_output.pci-0000_00_1b.0.analog-stereo.monitor 1

# Set microphone sensitivity to 65%
pactl set-source-volume alsa_input.pci-0000_00_1b.0.analog-stereo 65%

# Set speaker volume to 40%
pactl set-sink-volume alsa_output.pci-0000_00_1b.0.analog-stereo 40%

# (Removed the trailing self-`chmod +x /setup-audio.sh`: it ran only after
# the script was already executing, so it had no effect — the Dockerfile
# already marks this script executable at build time.)
|
||||
415
docker/speech/wake_word_detector.py
Normal file
415
docker/speech/wake_word_detector.py
Normal file
@@ -0,0 +1,415 @@
|
||||
import os
|
||||
import json
|
||||
import queue
|
||||
import threading
|
||||
import numpy as np
|
||||
import sounddevice as sd
|
||||
from openwakeword import Model
|
||||
from datetime import datetime
|
||||
import wave
|
||||
from faster_whisper import WhisperModel
|
||||
import requests
|
||||
import logging
|
||||
import time
|
||||
|
||||
# Set up logging — DEBUG on purpose: the container logs to stdout
# (PYTHONUNBUFFERED=1 in the Dockerfile) and verbose output is the main
# debugging aid for the audio pipeline.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Configuration
SAMPLE_RATE = 16000  # Hz — capture rate; presumably chosen for Whisper input, confirm
CHANNELS = 1  # mono capture
CHUNK_SIZE = 1024  # frames delivered per audio callback
BUFFER_DURATION = 10  # seconds to keep in buffer
DETECTION_THRESHOLD = 0.5  # minimum wake-word confidence to trigger
CONTINUOUS_TRANSCRIPTION_INTERVAL = 3  # seconds between transcriptions
MAX_MODEL_LOAD_RETRIES = 3
MODEL_LOAD_RETRY_DELAY = 5  # seconds
MODEL_DOWNLOAD_TIMEOUT = 600  # 10 minutes timeout for model download

# Audio processing parameters
NOISE_THRESHOLD = 0.08  # Increased threshold for better noise filtering
MIN_SPEECH_DURATION = 2.0  # Longer minimum duration to avoid fragments
SILENCE_DURATION = 1.0  # Longer silence duration
MAX_REPETITIONS = 1  # More aggressive repetition filtering
ECHO_THRESHOLD = 0.75  # More sensitive echo detection
MIN_SEGMENT_DURATION = 1.0  # Longer minimum segment duration (unused here — TODO confirm)
FEEDBACK_WINDOW = 5  # Window size for feedback detection in seconds (unused here — TODO confirm)

# Feature flags from environment
WAKE_WORD_ENABLED = os.environ.get('ENABLE_WAKE_WORD', 'false').lower() == 'true'
SPEECH_ENABLED = os.environ.get('ENABLE_SPEECH_FEATURES', 'true').lower() == 'true'

# Wake word models to use (only if wake word is enabled)
WAKE_WORDS = ["alexa"]  # Using 'alexa' as temporary replacement for 'gaja'
WAKE_WORD_ALIAS = "gaja"  # What we print when wake word is detected

# Home Assistant Configuration
HASS_HOST = os.environ.get('HASS_HOST', 'http://homeassistant.local:8123')
HASS_TOKEN = os.environ.get('HASS_TOKEN')  # long-lived access token; None if unset
|
||||
|
||||
def initialize_asr_model():
    """Load the faster-whisper ASR model, retrying on transient failures.

    Reads ASR_MODEL_PATH (download/cache directory, default '/models')
    and ASR_MODEL (model name, default 'large-v3') from the environment.

    Returns:
        WhisperModel: the loaded model.

    Raises:
        TimeoutError: when the overall load/download budget
            (MODEL_DOWNLOAD_TIMEOUT) is exhausted.
        Exception: the last load error after MAX_MODEL_LOAD_RETRIES attempts.
    """
    model_path = os.environ.get('ASR_MODEL_PATH', '/models')
    model_name = os.environ.get('ASR_MODEL', 'large-v3')

    start_time = time.time()
    for attempt in range(MAX_MODEL_LOAD_RETRIES):
        # BUG FIX: enforce the time budget OUTSIDE the try block. The
        # original raised TimeoutError inside the try, where the generic
        # `except Exception` caught it and simply retried, defeating the
        # timeout entirely.
        if time.time() - start_time > MODEL_DOWNLOAD_TIMEOUT:
            logger.error("Model download timeout exceeded")
            raise TimeoutError("Model download took too long")

        try:
            logger.info(f"Loading ASR model (attempt {attempt + 1}/{MAX_MODEL_LOAD_RETRIES})")
            model = WhisperModel(
                model_size_or_path=model_name,
                device="cpu",
                compute_type="int8",  # int8 keeps CPU memory use low
                download_root=model_path,
                num_workers=1  # Reduce concurrent downloads
            )
            logger.info("ASR model loaded successfully")
            return model
        except Exception as e:
            logger.error(f"Failed to load ASR model (attempt {attempt + 1}): {e}")
            if attempt < MAX_MODEL_LOAD_RETRIES - 1:
                logger.info(f"Retrying in {MODEL_LOAD_RETRY_DELAY} seconds...")
                time.sleep(MODEL_LOAD_RETRY_DELAY)
            else:
                logger.error("Failed to load ASR model after all retries")
                raise
|
||||
|
||||
# Initialize the ASR model with retries, once at import time, so a broken
# model setup fails fast at container start rather than later inside the
# audio callback.
try:
    asr_model = initialize_asr_model()
except Exception as e:
    logger.error(f"Critical error initializing ASR model: {e}")
    raise
|
||||
|
||||
def send_command_to_hass(domain, service, entity_id):
    """Call a Home Assistant service for a single entity.

    Args:
        domain: Service domain, e.g. "light".
        service: Service name, e.g. "turn_on".
        entity_id: Target entity, e.g. "light.living_room".

    Returns:
        True on success, False on any failure (including a missing
        HASS_TOKEN, which is logged but never raises).
    """
    if not HASS_TOKEN:
        logger.error("Error: HASS_TOKEN not set")
        return False

    headers = {
        "Authorization": f"Bearer {HASS_TOKEN}",
        "Content-Type": "application/json",
    }

    url = f"{HASS_HOST}/api/services/{domain}/{service}"
    data = {"entity_id": entity_id}

    try:
        # BUG FIX: the original call had no timeout, so an unreachable
        # Home Assistant instance would hang this thread (and with it the
        # audio pipeline) indefinitely.
        response = requests.post(url, headers=headers, json=data, timeout=10)
        response.raise_for_status()
        logger.info(f"Command sent: {domain}.{service} for {entity_id}")
        return True
    except Exception as e:
        logger.error(f"Error sending command to Home Assistant: {e}")
        return False
|
||||
|
||||
def is_speech(audio_data, threshold=NOISE_THRESHOLD):
    """Detect if audio segment contains speech based on amplitude and frequency content.

    A chunk counts as speech only when it passes an RMS loudness gate, a
    speech-band (100-4000 Hz) energy gate, AND all three feedback/echo
    heuristics below come back negative.

    Args:
        audio_data: 1-D float sample array (one mono audio chunk).
        threshold: minimum RMS / band-energy level; defaults to NOISE_THRESHOLD.

    Returns:
        Truthy when the chunk looks like clean speech (may be a numpy bool).
    """
    # Calculate RMS amplitude
    rms = np.sqrt(np.mean(np.square(audio_data)))

    # Calculate signal energy in speech frequency range (100-4000 Hz)
    fft = np.fft.fft(audio_data)
    freqs = np.fft.fftfreq(len(audio_data), 1/SAMPLE_RATE)
    speech_mask = (np.abs(freqs) >= 100) & (np.abs(freqs) <= 4000)
    speech_energy = np.sum(np.abs(fft[speech_mask])) / len(audio_data)

    # Enhanced echo detection
    # 1. Check for periodic patterns in the signal: an echo/feedback loop
    #    shows up as evenly spaced autocorrelation peaks (low spread in
    #    peak spacing relative to the mean spacing).
    autocorr = np.correlate(audio_data, audio_data, mode='full')
    autocorr = autocorr[len(autocorr)//2:]  # Use only positive lags
    peaks = np.where(autocorr > ECHO_THRESHOLD * np.max(autocorr))[0]
    peak_spacing = np.diff(peaks)
    has_periodic_echo = len(peak_spacing) > 2 and np.std(peak_spacing) < 0.1 * np.mean(peak_spacing)

    # 2. Check for sudden amplitude changes (sample-to-sample jumps larger
    #    than 2x the threshold are treated as feedback spikes)
    amplitude_envelope = np.abs(audio_data)
    amplitude_changes = np.diff(amplitude_envelope)
    has_feedback_spikes = np.any(np.abs(amplitude_changes) > threshold * 2)

    # 3. Check frequency distribution: if any of the 3 strongest bins sit
    #    in the 2-4 kHz band, treat it as probable feedback squeal.
    freq_magnitudes = np.abs(fft)[:len(fft)//2]
    peak_freqs = freqs[:len(fft)//2][np.argsort(freq_magnitudes)[-3:]]
    has_feedback_freqs = np.any((peak_freqs > 2000) & (peak_freqs < 4000))

    # Combine all criteria — every gate must pass
    is_valid_speech = (
        rms > threshold and
        speech_energy > threshold and
        not has_periodic_echo and
        not has_feedback_spikes and
        not has_feedback_freqs
    )

    return is_valid_speech
|
||||
|
||||
def process_command(text):
    """Parse a transcribed German utterance and trigger the matching
    Home Assistant light command, if any.

    Several noise filters run first (short text, digits, known filler
    patterns, repeated words/phrases); only a clean utterance containing
    both a known room and a known command is dispatched.
    """
    text = text.lower().strip()

    # Reject obvious noise: very short utterances or anything with digits.
    if len(text) < 5 or any(ch.isdigit() for ch in text):
        logger.debug("Text too short or contains numbers, skipping")
        return

    # Enhanced noise pattern detection — filler fragments appearing more
    # than once indicate a garbage transcription.
    noise_patterns = ["lei", "los", "und", "aber", "nicht mehr", "das das", "und und"]
    noisy = next((p for p in noise_patterns if text.count(p) > 1), None)
    if noisy is not None:
        logger.debug(f"Detected noise pattern '{noisy}', skipping")
        return

    words = text.split()
    if len(words) >= 2:
        # Immediate word repetitions ("licht licht") are transcription echo.
        for first, second in zip(words, words[1:]):
            if first == second:
                logger.debug(f"Detected immediate word repetition: '{first}', skipping")
                return

        # Repeated two-word phrases beyond MAX_REPETITIONS are also noise.
        counts = {}
        for idx in range(len(words) - 1):
            bigram = f"{words[idx]} {words[idx + 1]}"
            counts[bigram] = counts.get(bigram, 0) + 1
            if counts[bigram] > MAX_REPETITIONS:
                logger.debug(f"Skipping due to excessive repetition: '{bigram}'")
                return

    # German command mappings
    commands = {
        "ausschalten": "turn_off",
        "einschalten": "turn_on",
        "an": "turn_on",
        "aus": "turn_off"
    }

    rooms = {
        "wohnzimmer": "living_room",
        "küche": "kitchen",
        "schlafzimmer": "bedroom",
        "bad": "bathroom"
    }

    # First German room / command word found in the utterance wins.
    detected_room = next((en for de, en in rooms.items() if de in text), None)
    detected_command = next((en for de, en in commands.items() if de in text), None)

    if detected_room and detected_command:
        # Construct entity ID (assuming a light in that room)
        entity_id = f"light.{detected_room}"

        # Send command to Home Assistant
        if send_command_to_hass("light", detected_command, entity_id):
            logger.info(f"Executed: {detected_command} for {entity_id}")
        else:
            logger.error("Failed to execute command")
    else:
        logger.debug(f"No command found in text: '{text}'")
|
||||
|
||||
class AudioProcessor:
    """Captures microphone audio and either waits for a wake word or
    periodically transcribes detected speech, dispatching recognized
    German commands to Home Assistant via process_command().

    A rolling buffer of the last BUFFER_DURATION seconds is kept; on
    trigger it is written to a WAV file under /audio and transcribed
    with the module-level faster-whisper model (asr_model).
    """

    def __init__(self):
        logger.info("Initializing AudioProcessor...")
        self.audio_buffer = queue.Queue()  # NOTE(review): never consumed — confirm intent
        self.recording = False
        # Circular buffer holding the most recent BUFFER_DURATION seconds.
        self.buffer = np.zeros(SAMPLE_RATE * BUFFER_DURATION)
        self.buffer_lock = threading.Lock()  # guards self.buffer (callback vs. main thread)
        self.last_transcription_time = 0
        self.stream = None
        self.speech_detected = False
        self.silence_frames = 0  # consecutive non-speech chunks
        self.speech_frames = 0   # consecutive speech chunks

        # Initialize wake word detection only if enabled
        if WAKE_WORD_ENABLED:
            try:
                logger.info("Initializing wake word model...")
                self.wake_word_model = Model(vad_threshold=0.5)
                self.last_prediction = None
                logger.info("Wake word model initialized successfully")
            except Exception as e:
                logger.error(f"Failed to initialize wake word model: {e}")
                raise
        else:
            self.wake_word_model = None
            self.last_prediction = None
            logger.info("Wake word detection disabled")

    def should_transcribe(self):
        """Determine if we should transcribe based on mode and timing.

        In continuous mode (wake word disabled): require both the
        transcription interval to have elapsed AND enough consecutive
        speech chunks to cover MIN_SPEECH_DURATION. Always False when
        the wake word is enabled (triggering happens in the callback).
        """
        current_time = datetime.now().timestamp()
        if not WAKE_WORD_ENABLED:
            # Check if enough time has passed since last transcription
            time_since_last = current_time - self.last_transcription_time
            if time_since_last >= CONTINUOUS_TRANSCRIPTION_INTERVAL:
                # Only transcribe if we detect speech
                frames_per_chunk = CHUNK_SIZE
                min_speech_frames = int(MIN_SPEECH_DURATION * SAMPLE_RATE / frames_per_chunk)

                if self.speech_frames >= min_speech_frames:
                    self.last_transcription_time = current_time
                    self.speech_frames = 0  # Reset counter
                    return True
        return False

    def audio_callback(self, indata, frames, time, status):
        """Callback for audio input (sounddevice stream thread).

        NOTE: the `time` parameter (sounddevice timing info) shadows the
        `time` module inside this method; the module is not used here.
        """
        if status:
            logger.warning(f"Audio callback status: {status}")

        # Convert to mono if necessary
        if CHANNELS > 1:
            audio_data = np.mean(indata, axis=1)
        else:
            audio_data = indata.flatten()

        # Track consecutive speech/silence chunks for should_transcribe()
        if is_speech(audio_data):
            self.speech_frames += 1
            self.silence_frames = 0
        else:
            self.silence_frames += 1
            frames_per_chunk = CHUNK_SIZE
            silence_frames_threshold = int(SILENCE_DURATION * SAMPLE_RATE / frames_per_chunk)

            # Sustained silence resets the accumulated speech evidence.
            if self.silence_frames >= silence_frames_threshold:
                self.speech_frames = 0

        # Update circular buffer (roll left, append newest chunk at the end)
        with self.buffer_lock:
            self.buffer = np.roll(self.buffer, -len(audio_data))
            self.buffer[-len(audio_data):] = audio_data

        if WAKE_WORD_ENABLED:
            # Process for wake word detection
            self.last_prediction = self.wake_word_model.predict(audio_data)

            # Check if wake word detected
            for wake_word in WAKE_WORDS:
                confidence = self.last_prediction[wake_word]
                if confidence > DETECTION_THRESHOLD:
                    logger.info(
                        f"Wake word: {WAKE_WORD_ALIAS} (confidence: {confidence:.2f})"
                    )
                    self.process_audio()
                    break
        else:
            # Continuous transcription mode
            if self.should_transcribe():
                self.process_audio()

    def process_audio(self):
        """Process the current audio buffer (save and transcribe).

        Writes the rolling buffer to a timestamped WAV under /audio,
        transcribes it (German preferred) and hands the text to
        process_command(). Saved files are never deleted — TODO confirm
        whether /audio is expected to grow unbounded.
        """
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"/audio/audio_segment_{timestamp}.wav"

        # Save the audio buffer to a WAV file
        with wave.open(filename, 'wb') as wf:
            wf.setnchannels(CHANNELS)
            wf.setsampwidth(2)  # 16-bit audio
            wf.setframerate(SAMPLE_RATE)

            # Convert float32 to int16
            audio_data = (self.buffer * 32767).astype(np.int16)
            wf.writeframes(audio_data.tobytes())

        # BUG FIX: the original logged a literal placeholder instead of
        # the actual file path.
        logger.info(f"Saved audio segment to {filename}")

        # Transcribe the audio with German language preference
        try:
            segments, info = asr_model.transcribe(
                filename,
                language="de",  # Set German as preferred language
                beam_size=5,
                temperature=0
            )

            # Get the full transcribed text
            transcribed_text = " ".join(segment.text for segment in segments)
            logger.info(f"Transcribed text: {transcribed_text}")

            # Process the command
            process_command(transcribed_text)

        except Exception as e:
            logger.error(f"Error during transcription or processing: {e}")

    def start(self):
        """Start audio processing: open the input stream and block until
        interrupted (Ctrl+C) or a fatal stream error occurs."""
        try:
            logger.info("Starting audio processor...")

            # Log configuration
            logger.debug(f"Sample Rate: {SAMPLE_RATE}")
            logger.debug(f"Channels: {CHANNELS}")
            logger.debug(f"Chunk Size: {CHUNK_SIZE}")
            logger.debug(f"Buffer Duration: {BUFFER_DURATION}")
            logger.debug(f"Wake Word Enabled: {WAKE_WORD_ENABLED}")
            logger.debug(f"Speech Enabled: {SPEECH_ENABLED}")
            logger.debug(f"ASR Model: {os.environ.get('ASR_MODEL')}")

            if WAKE_WORD_ENABLED:
                logger.info("Initializing wake word detection...")
                logger.info(f"Loaded wake words: {', '.join(WAKE_WORDS)}")
            else:
                logger.info("Starting continuous transcription mode...")
                interval = CONTINUOUS_TRANSCRIPTION_INTERVAL
                logger.info(f"Will transcribe every {interval} seconds")

            try:
                logger.debug("Setting up audio input stream...")
                with sd.InputStream(
                    channels=CHANNELS,
                    samplerate=SAMPLE_RATE,
                    blocksize=CHUNK_SIZE,
                    callback=self.audio_callback
                ):
                    logger.info("Audio input stream started successfully")
                    logger.info("Listening for audio input...")
                    logger.info("Press Ctrl+C to stop")

                    # Keep the main thread alive; work happens in the callback.
                    while True:
                        sd.sleep(1000)  # Sleep for 1 second

            except sd.PortAudioError as e:
                logger.error(f"Error setting up audio stream: {e}")
                logger.error("Check if microphone is connected and accessible")
                raise
            except Exception as e:
                logger.error(f"Unexpected error in audio stream: {e}")
                raise

        except KeyboardInterrupt:
            logger.info("\nStopping audio processing...")
        except Exception as e:
            logger.error("Critical error in audio processing", exc_info=True)
            raise
|
||||
|
||||
if __name__ == "__main__":
    # Entry point: build the processor and block inside its capture loop.
    # Any startup failure is logged with a traceback and re-raised so the
    # container exits non-zero.
    try:
        logger.info("Initializing AudioProcessor...")
        AudioProcessor().start()
    except Exception:
        logger.error("Failed to start AudioProcessor", exc_info=True)
        raise
|
||||
419
docs/API.md
419
docs/API.md
@@ -1,419 +0,0 @@
|
||||
# API Reference
|
||||
|
||||
## MCP Schema Endpoint
|
||||
|
||||
The server exposes an MCP (Model Context Protocol) schema endpoint that describes all available tools and their parameters:
|
||||
|
||||
```http
|
||||
GET /mcp
|
||||
```
|
||||
|
||||
This endpoint returns a JSON schema describing all available tools, their parameters, and documentation resources. The schema follows the MCP specification and can be used by LLM clients to understand the server's capabilities.
|
||||
|
||||
Example response:
|
||||
```json
|
||||
{
|
||||
"tools": [
|
||||
{
|
||||
"name": "list_devices",
|
||||
"description": "List all devices connected to Home Assistant",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"domain": {
|
||||
"type": "string",
|
||||
"enum": ["light", "climate", "alarm_control_panel", ...]
|
||||
},
|
||||
"area": { "type": "string" },
|
||||
"floor": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
// ... other tools
|
||||
],
|
||||
"prompts": [],
|
||||
"resources": [
|
||||
{
|
||||
"name": "Home Assistant API",
|
||||
"url": "https://developers.home-assistant.io/docs/api/rest/"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Note: The `/mcp` endpoint is publicly accessible and does not require authentication, as it only provides schema information.
|
||||
|
||||
## Device Control
|
||||
|
||||
### Common Entity Controls
|
||||
```json
|
||||
{
|
||||
"tool": "control",
|
||||
"command": "turn_on", // or "turn_off", "toggle"
|
||||
"entity_id": "light.living_room"
|
||||
}
|
||||
```
|
||||
|
||||
### Light Control
|
||||
```json
|
||||
{
|
||||
"tool": "control",
|
||||
"command": "turn_on",
|
||||
"entity_id": "light.living_room",
|
||||
"brightness": 128,
|
||||
"color_temp": 4000,
|
||||
"rgb_color": [255, 0, 0]
|
||||
}
|
||||
```
|
||||
|
||||
## Add-on Management
|
||||
|
||||
### List Available Add-ons
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "list"
|
||||
}
|
||||
```
|
||||
|
||||
### Install Add-on
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "install",
|
||||
"slug": "core_configurator",
|
||||
"version": "5.6.0"
|
||||
}
|
||||
```
|
||||
|
||||
### Manage Add-on State
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "start", // or "stop", "restart"
|
||||
"slug": "core_configurator"
|
||||
}
|
||||
```
|
||||
|
||||
## Package Management
|
||||
|
||||
### List HACS Packages
|
||||
```json
|
||||
{
|
||||
"tool": "package",
|
||||
"action": "list",
|
||||
"category": "integration" // or "plugin", "theme", "python_script", "appdaemon", "netdaemon"
|
||||
}
|
||||
```
|
||||
|
||||
### Install Package
|
||||
```json
|
||||
{
|
||||
"tool": "package",
|
||||
"action": "install",
|
||||
"category": "integration",
|
||||
"repository": "hacs/integration",
|
||||
"version": "1.32.0"
|
||||
}
|
||||
```
|
||||
|
||||
## Automation Management
|
||||
|
||||
### Create Automation
|
||||
```json
|
||||
{
|
||||
"tool": "automation_config",
|
||||
"action": "create",
|
||||
"config": {
|
||||
"alias": "Motion Light",
|
||||
"description": "Turn on light when motion detected",
|
||||
"mode": "single",
|
||||
"trigger": [
|
||||
{
|
||||
"platform": "state",
|
||||
"entity_id": "binary_sensor.motion",
|
||||
"to": "on"
|
||||
}
|
||||
],
|
||||
"action": [
|
||||
{
|
||||
"service": "light.turn_on",
|
||||
"target": {
|
||||
"entity_id": "light.living_room"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Duplicate Automation
|
||||
```json
|
||||
{
|
||||
"tool": "automation_config",
|
||||
"action": "duplicate",
|
||||
"automation_id": "automation.motion_light"
|
||||
}
|
||||
```
|
||||
|
||||
## Core Functions
|
||||
|
||||
### State Management
|
||||
```http
|
||||
GET /api/state
|
||||
POST /api/state
|
||||
```
|
||||
|
||||
Manages the current state of the system.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/state
|
||||
{
|
||||
"context": "living_room",
|
||||
"state": {
|
||||
"lights": "on",
|
||||
"temperature": 22
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Context Updates
|
||||
```http
|
||||
POST /api/context
|
||||
```
|
||||
|
||||
Updates the current context with new information.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/context
|
||||
{
|
||||
"user": "john",
|
||||
"location": "kitchen",
|
||||
"time": "morning",
|
||||
"activity": "cooking"
|
||||
}
|
||||
```
|
||||
|
||||
## Action Endpoints
|
||||
|
||||
### Execute Action
|
||||
```http
|
||||
POST /api/action
|
||||
```
|
||||
|
||||
Executes a specified action with given parameters.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/action
|
||||
{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room",
|
||||
"brightness": 80
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Batch Actions
|
||||
```http
|
||||
POST /api/actions/batch
|
||||
```
|
||||
|
||||
Executes multiple actions in sequence.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/actions/batch
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "set_temperature",
|
||||
"parameters": {
|
||||
"temperature": 22
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Query Functions
|
||||
|
||||
### Get Available Actions
|
||||
```http
|
||||
GET /api/actions
|
||||
```
|
||||
|
||||
Returns a list of all available actions.
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"name": "turn_on_lights",
|
||||
"parameters": ["room", "brightness"],
|
||||
"description": "Turns on lights in specified room"
|
||||
},
|
||||
{
|
||||
"name": "set_temperature",
|
||||
"parameters": ["temperature"],
|
||||
"description": "Sets temperature in current context"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Context Query
|
||||
```http
|
||||
GET /api/context?type=current
|
||||
```
|
||||
|
||||
Retrieves context information.
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"current_context": {
|
||||
"user": "john",
|
||||
"location": "kitchen",
|
||||
"time": "morning",
|
||||
"activity": "cooking"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## WebSocket Events
|
||||
|
||||
The server supports real-time updates via WebSocket connections.
|
||||
|
||||
```javascript
|
||||
// Client-side connection example
|
||||
const ws = new WebSocket('ws://localhost:3000/ws');
|
||||
|
||||
ws.onmessage = (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
console.log('Received update:', data);
|
||||
};
|
||||
```
|
||||
|
||||
### Supported Events
|
||||
|
||||
- `state_change`: Emitted when system state changes
|
||||
- `context_update`: Emitted when context is updated
|
||||
- `action_executed`: Emitted when an action is completed
|
||||
- `error`: Emitted when an error occurs
|
||||
|
||||
**Example Event Data:**
|
||||
```json
|
||||
{
|
||||
"event": "state_change",
|
||||
"data": {
|
||||
"previous_state": {
|
||||
"lights": "off"
|
||||
},
|
||||
"current_state": {
|
||||
"lights": "on"
|
||||
},
|
||||
"timestamp": "2024-03-20T10:30:00Z"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
All endpoints return standard HTTP status codes:
|
||||
|
||||
- 200: Success
|
||||
- 400: Bad Request
|
||||
- 401: Unauthorized
|
||||
- 403: Forbidden
|
||||
- 404: Not Found
|
||||
- 500: Internal Server Error
|
||||
|
||||
**Error Response Format:**
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "INVALID_PARAMETERS",
|
||||
"message": "Missing required parameter: room",
|
||||
"details": {
|
||||
"missing_fields": ["room"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
The API implements rate limiting to prevent abuse:
|
||||
|
||||
- 100 requests per minute per IP for regular endpoints
|
||||
- 1000 requests per minute per IP for WebSocket connections
|
||||
|
||||
When rate limit is exceeded, the server returns:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "RATE_LIMIT_EXCEEDED",
|
||||
"message": "Too many requests",
|
||||
"reset_time": "2024-03-20T10:31:00Z"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Example Usage
|
||||
|
||||
### Using curl
|
||||
```bash
|
||||
# Get current state
|
||||
curl -X GET \
|
||||
http://localhost:3000/api/state \
|
||||
-H 'Authorization: ApiKey your_api_key_here'
|
||||
|
||||
# Execute action
|
||||
curl -X POST \
|
||||
http://localhost:3000/api/action \
|
||||
-H 'Authorization: ApiKey your_api_key_here' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room",
|
||||
"brightness": 80
|
||||
}
|
||||
}'
|
||||
```
|
||||
|
||||
### Using JavaScript
|
||||
```javascript
|
||||
// Execute action
|
||||
async function executeAction() {
|
||||
const response = await fetch('http://localhost:3000/api/action', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': 'ApiKey your_api_key_here',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
action: 'turn_on_lights',
|
||||
parameters: {
|
||||
room: 'living_room',
|
||||
brightness: 80
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Action result:', data);
|
||||
}
|
||||
```
|
||||
23
docs/Gemfile
Normal file
23
docs/Gemfile
Normal file
@@ -0,0 +1,23 @@
|
||||
source "https://rubygems.org"
|
||||
|
||||
gem "github-pages", group: :jekyll_plugins
|
||||
gem "jekyll-theme-minimal"
|
||||
gem "jekyll-relative-links"
|
||||
gem "jekyll-seo-tag"
|
||||
gem "jekyll-remote-theme"
|
||||
gem "jekyll-github-metadata"
|
||||
gem "faraday-retry"
|
||||
|
||||
# Windows and JRuby do not include zoneinfo files, so bundle the
# tzinfo-data gem and its associated library.
|
||||
platforms :mingw, :x64_mingw, :mswin, :jruby do
|
||||
gem "tzinfo", ">= 1"
|
||||
gem "tzinfo-data"
|
||||
end
|
||||
|
||||
# Lock `http_parser.rb` gem to `v0.6.x` on JRuby builds since newer versions of the gem
|
||||
# do not have a Java counterpart.
|
||||
gem "http_parser.rb", "~> 0.6.0", :platforms => [:jruby]
|
||||
|
||||
# Add webrick for Ruby 3.0+
|
||||
gem "webrick", "~> 1.7"
|
||||
@@ -1,60 +0,0 @@
|
||||
# Home Assistant MCP Documentation
|
||||
|
||||
Welcome to the Home Assistant MCP (Master Control Program) documentation. This documentation provides comprehensive information about setting up, configuring, and using the Home Assistant MCP.
|
||||
|
||||
## Table of Contents
|
||||
|
||||
1. [Getting Started](./getting-started.md)
|
||||
- Installation
|
||||
- Configuration
|
||||
- First Steps
|
||||
|
||||
2. [API Reference](./API.md)
|
||||
- REST API Endpoints
|
||||
- Authentication
|
||||
- Error Handling
|
||||
|
||||
3. [SSE (Server-Sent Events)](./SSE_API.md)
|
||||
- Event Subscriptions
|
||||
- Real-time Updates
|
||||
- Connection Management
|
||||
|
||||
4. [Tools](./tools/README.md)
|
||||
- Device Control
|
||||
- Automation Management
|
||||
- Add-on Management
|
||||
- Package Management
|
||||
|
||||
5. [Configuration](./configuration/README.md)
|
||||
- Environment Variables
|
||||
- Security Settings
|
||||
- Performance Tuning
|
||||
|
||||
6. [Development](./development/README.md)
|
||||
- Project Structure
|
||||
- Contributing Guidelines
|
||||
- Testing
|
||||
|
||||
7. [Troubleshooting](./troubleshooting.md)
|
||||
- Common Issues
|
||||
- Debugging
|
||||
- FAQ
|
||||
|
||||
## Quick Links
|
||||
|
||||
- [GitHub Repository](https://github.com/yourusername/homeassistant-mcp)
|
||||
- [Issue Tracker](https://github.com/yourusername/homeassistant-mcp/issues)
|
||||
- [Change Log](./CHANGELOG.md)
|
||||
- [Security Policy](./SECURITY.md)
|
||||
|
||||
## Support
|
||||
|
||||
If you need help or have questions:
|
||||
|
||||
1. Check the [Troubleshooting Guide](./troubleshooting.md)
|
||||
2. Search existing [Issues](https://github.com/yourusername/homeassistant-mcp/issues)
|
||||
3. Create a new issue if your problem isn't already reported
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License - see the [LICENSE](../LICENSE) file for details.
|
||||
78
docs/_config.yml
Normal file
78
docs/_config.yml
Normal file
@@ -0,0 +1,78 @@
|
||||
title: Model Context Protocol (MCP)
|
||||
description: A bridge between Home Assistant and Language Learning Models
|
||||
theme: jekyll-theme-minimal
|
||||
markdown: kramdown
|
||||
|
||||
# Repository settings
|
||||
repository: jango-blockchained/advanced-homeassistant-mcp
|
||||
github: [metadata]
|
||||
|
||||
# Add base URL and URL settings
|
||||
baseurl: "/advanced-homeassistant-mcp" # the subpath of your site
|
||||
url: "https://jango-blockchained.github.io" # the base hostname & protocol
|
||||
|
||||
# Theme settings
|
||||
logo: /assets/img/logo.png # path to logo (create this if you want a logo)
|
||||
show_downloads: true # show download buttons for your repo
|
||||
|
||||
plugins:
|
||||
- jekyll-relative-links
|
||||
- jekyll-seo-tag
|
||||
- jekyll-remote-theme
|
||||
- jekyll-github-metadata
|
||||
|
||||
# Enable relative links
|
||||
relative_links:
|
||||
enabled: true
|
||||
collections: true
|
||||
|
||||
# Navigation structure
|
||||
header_pages:
|
||||
- index.md
|
||||
- getting-started.md
|
||||
- api.md
|
||||
- usage.md
|
||||
- tools/tools.md
|
||||
- development/development.md
|
||||
- troubleshooting.md
|
||||
- contributing.md
|
||||
- roadmap.md
|
||||
|
||||
# Collections
|
||||
collections:
|
||||
tools:
|
||||
output: true
|
||||
permalink: /:collection/:name
|
||||
development:
|
||||
output: true
|
||||
permalink: /:collection/:name
|
||||
|
||||
# Default layouts
|
||||
defaults:
|
||||
- scope:
|
||||
path: ""
|
||||
type: "pages"
|
||||
values:
|
||||
layout: "default"
|
||||
- scope:
|
||||
path: "tools"
|
||||
type: "tools"
|
||||
values:
|
||||
layout: "default"
|
||||
- scope:
|
||||
path: "development"
|
||||
type: "development"
|
||||
values:
|
||||
layout: "default"
|
||||
|
||||
# Exclude files from processing
|
||||
exclude:
|
||||
- Gemfile
|
||||
- Gemfile.lock
|
||||
- node_modules
|
||||
- vendor
|
||||
|
||||
# Sass settings
|
||||
sass:
|
||||
style: compressed
|
||||
sass_dir: _sass
|
||||
52
docs/_layouts/default.html
Normal file
52
docs/_layouts/default.html
Normal file
@@ -0,0 +1,52 @@
|
||||
<!DOCTYPE html>
<!-- Jekyll default layout. BUG FIX: the original had formatter-mangled
     Liquid quoting (e.g. lang="{{ site.lang | default: " en-US" }}" and
     href="{{ " /assets/... ), which broke the HTML attributes. Liquid
     string arguments now use single quotes inside double-quoted
     attributes. -->
<html lang="{{ site.lang | default: 'en-US' }}">

<head>
  <meta charset="UTF-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  {% seo %}
  <!-- Cache-bust the stylesheet with the Pages build revision. -->
  <link rel="stylesheet" href="{{ '/assets/css/style.css?v=' | append: site.github.build_revision | relative_url }}">
</head>

<body>
  <div class="wrapper">
    <header>
      <h1><a href="{{ '/' | absolute_url }}">{{ site.title | default: site.github.repository_name }}</a></h1>

      {% if site.logo %}
      <img src="{{ site.logo | relative_url }}" alt="Logo" />
      {% endif %}

      <p>{{ site.description | default: site.github.project_tagline }}</p>

      <p class="view"><a href="{{ site.github.repository_url }}">View the Project on GitHub <small>{{
          site.github.repository_nwo }}</small></a></p>

      <nav class="main-nav">
        <h3>Documentation</h3>
        <ul>
          <li><a href="{{ '/getting-started' | relative_url }}">Getting Started</a></li>
          <li><a href="{{ '/api' | relative_url }}">API Reference</a></li>
          <li><a href="{{ '/sse-api' | relative_url }}">SSE API</a></li>
          <li><a href="{{ '/architecture' | relative_url }}">Architecture</a></li>
          <li><a href="{{ '/contributing' | relative_url }}">Contributing</a></li>
          <li><a href="{{ '/troubleshooting' | relative_url }}">Troubleshooting</a></li>
        </ul>
      </nav>
    </header>
    <section>
      {{ content }}
    </section>
    <footer>
      {% if site.github.is_project_page %}
      <p>This project is maintained by <a href="{{ site.github.owner_url }}">{{ site.github.owner_name }}</a></p>
      {% endif %}
      <p><small>Hosted on GitHub Pages — Theme by <a
            href="https://github.com/orderedlist">orderedlist</a></small></p>
    </footer>
  </div>
  <script src="{{ '/assets/js/scale.fix.js' | relative_url }}"></script>
</body>

</html>
|
||||
728
docs/api.md
Normal file
728
docs/api.md
Normal file
@@ -0,0 +1,728 @@
|
||||
# 🚀 Home Assistant MCP API Documentation
|
||||
|
||||
 
|
||||
|
||||
## 🌟 Quick Start
|
||||
|
||||
```bash
|
||||
# Get API schema with caching
|
||||
curl -X GET http://localhost:3000/mcp \
|
||||
-H "Cache-Control: max-age=3600" # Cache for 1 hour
|
||||
```
|
||||
|
||||
## 🔌 Core Functions ⚙️
|
||||
|
||||
### State Management (`/api/state`)
|
||||
```http
|
||||
GET /api/state?cache=true # Enable client-side caching
|
||||
POST /api/state
|
||||
```
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
{
|
||||
"context": "living_room",
|
||||
"state": {
|
||||
"lights": "on",
|
||||
"temperature": 22
|
||||
},
|
||||
"_cache": { // Optional caching config
|
||||
"ttl": 300, // 5 minutes
|
||||
"tags": ["lights", "climate"]
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## ⚡ Action Endpoints
|
||||
|
||||
### Execute Action with Cache Validation
|
||||
```http
|
||||
POST /api/action
|
||||
If-None-Match: "etag_value" // Prevent duplicate actions
|
||||
```
|
||||
|
||||
**Batch Processing:**
|
||||
```json
|
||||
{
|
||||
"actions": [
|
||||
{ "action": "🌞 Morning Routine", "params": { "brightness": 80 } },
|
||||
{ "action": "❄️ AC Control", "params": { "temp": 21 } }
|
||||
],
|
||||
"_parallel": true // Execute actions concurrently
|
||||
}
|
||||
```
|
||||
|
||||
## 🔍 Query Functions
|
||||
|
||||
### Available Actions with ETag
|
||||
```http
|
||||
GET /api/actions
|
||||
ETag: "a1b2c3d4" // Client-side cache validation
|
||||
```
|
||||
|
||||
**Response Headers:**
|
||||
```
|
||||
Cache-Control: public, max-age=86400 // 24-hour cache
|
||||
ETag: "a1b2c3d4"
|
||||
```
|
||||
|
||||
## 🌐 WebSocket Events
|
||||
|
||||
```javascript
|
||||
const ws = new WebSocket('wss://ha-mcp/ws');
|
||||
ws.onmessage = ({ data }) => {
|
||||
const event = JSON.parse(data);
|
||||
if(event.type === 'STATE_UPDATE') {
|
||||
updateUI(event.payload); // 🎨 Real-time UI sync
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
## 🗃️ Caching Strategies
|
||||
|
||||
### Client-Side Caching
|
||||
```http
|
||||
GET /api/devices
|
||||
Cache-Control: max-age=300, stale-while-revalidate=60
|
||||
```
|
||||
|
||||
### Server-Side Cache-Control
|
||||
```typescript
|
||||
// Example middleware configuration
|
||||
app.use(
|
||||
cacheMiddleware({
|
||||
ttl: 60 * 5, // 5 minutes
|
||||
paths: ['/api/devices', '/mcp'],
|
||||
vary: ['Authorization'] // User-specific caching
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
## ❌ Error Handling
|
||||
|
||||
**429 Too Many Requests:**
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "RATE_LIMITED",
|
||||
"message": "Slow down! 🐢",
|
||||
"retry_after": 30,
|
||||
"docs": "https://ha-mcp/docs/rate-limits"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 🚦 Rate Limiting Tiers
|
||||
|
||||
| Tier | Requests/min | Features |
|
||||
|---------------|--------------|------------------------|
|
||||
| Guest | 10 | Basic read-only |
|
||||
| User | 100 | Full access |
|
||||
| Power User | 500 | Priority queue |
|
||||
| Integration | 1000 | Bulk operations |
|
||||
|
||||
## 🛠️ Example Usage
|
||||
|
||||
### Smart Cache Refresh
|
||||
```javascript
|
||||
async function getDevices() {
|
||||
const response = await fetch('/api/devices', {
|
||||
headers: {
|
||||
'If-None-Match': localStorage.getItem('devicesETag')
|
||||
}
|
||||
});
|
||||
|
||||
if(response.status === 304) { // Not Modified
|
||||
return JSON.parse(localStorage.devicesCache);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
localStorage.setItem('devicesETag', response.headers.get('ETag'));
|
||||
localStorage.setItem('devicesCache', JSON.stringify(data));
|
||||
return data;
|
||||
}
|
||||
```
|
||||
|
||||
## 🔒 Security Middleware (Enhanced)
|
||||
|
||||
### Cache-Aware Rate Limiting
|
||||
```typescript
|
||||
app.use(
|
||||
rateLimit({
|
||||
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||
max: 100, // Limit each IP to 100 requests per window
|
||||
cache: new RedisStore(), // Distributed cache
|
||||
keyGenerator: (req) => {
|
||||
return `${req.ip}-${req.headers.authorization}`;
|
||||
}
|
||||
})
|
||||
);
|
||||
```
|
||||
|
||||
### Security Headers
|
||||
```http
|
||||
Content-Security-Policy: default-src 'self';
|
||||
Strict-Transport-Security: max-age=31536000;
|
||||
X-Content-Type-Options: nosniff;
|
||||
Cache-Control: public, max-age=600;
|
||||
ETag: "abc123"
|
||||
```
|
||||
|
||||
## 📘 Best Practices
|
||||
|
||||
1. **Cache Wisely:** Use `ETag` and `Cache-Control` headers for state data
|
||||
2. **Batch Operations:** Combine requests using `/api/actions/batch`
|
||||
3. **WebSocket First:** Prefer real-time updates over polling
|
||||
4. **Error Recovery:** Implement exponential backoff with jitter
|
||||
5. **Cache Invalidation:** Use tags for bulk invalidation
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
A[Client] -->|Cached Request| B{CDN}
|
||||
B -->|Cache Hit| C[Return 304]
|
||||
B -->|Cache Miss| D[Origin Server]
|
||||
D -->|Response| B
|
||||
B -->|Response| A
|
||||
```
|
||||
|
||||
> Pro Tip: Use `curl -I` to inspect cache headers! 🔍
|
||||
|
||||
## Device Control
|
||||
|
||||
### Common Entity Controls
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "control",
|
||||
"command": "turn_on", // Options: "turn_on", "turn_off", "toggle"
|
||||
"entity_id": "light.living_room"
|
||||
}
|
||||
```
|
||||
|
||||
### Light Control
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "control",
|
||||
"command": "turn_on",
|
||||
"entity_id": "light.living_room",
|
||||
"brightness": 128,
|
||||
"color_temp": 4000,
|
||||
"rgb_color": [255, 0, 0]
|
||||
}
|
||||
```
|
||||
|
||||
## Add-on Management
|
||||
|
||||
### List Available Add-ons
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "list"
|
||||
}
|
||||
```
|
||||
|
||||
### Install Add-on
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "install",
|
||||
"slug": "core_configurator",
|
||||
"version": "5.6.0"
|
||||
}
|
||||
```
|
||||
|
||||
### Manage Add-on State
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "addon",
|
||||
"action": "start", // Options: "start", "stop", "restart"
|
||||
"slug": "core_configurator"
|
||||
}
|
||||
```
|
||||
|
||||
## Package Management
|
||||
|
||||
### List HACS Packages
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "package",
|
||||
"action": "list",
|
||||
"category": "integration" // Options: "integration", "plugin", "theme", "python_script", "appdaemon", "netdaemon"
|
||||
}
|
||||
```
|
||||
|
||||
### Install Package
|
||||
|
||||
```json
|
||||
{
|
||||
"tool": "package",
|
||||
"action": "install",
|
||||
"category": "integration",
|
||||
"repository": "hacs/integration",
|
||||
"version": "1.32.0"
|
||||
}
|
||||
```
|
||||
|
||||
## Automation Management
|
||||
|
||||
For automation management details and endpoints, please refer to the [Tools Documentation](tools/tools.md).
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- Validate and sanitize all user inputs.
|
||||
- Enforce rate limiting to prevent abuse.
|
||||
- Apply proper security headers.
|
||||
- Gracefully handle errors based on the environment.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you experience issues with the API:
|
||||
- Verify the endpoint and request payload.
|
||||
- Check authentication tokens and required headers.
|
||||
- Consult the [Troubleshooting Guide](troubleshooting.md) for further guidance.
|
||||
|
||||
## MCP Schema Endpoint
|
||||
|
||||
The server exposes an MCP (Model Context Protocol) schema endpoint that describes all available tools and their parameters:
|
||||
|
||||
```http
|
||||
GET /mcp
|
||||
```
|
||||
|
||||
This endpoint returns a JSON schema describing all available tools, their parameters, and documentation resources. The schema follows the MCP specification and can be used by LLM clients to understand the server's capabilities.
|
||||
|
||||
Example response:
|
||||
```json
|
||||
{
|
||||
"tools": [
|
||||
{
|
||||
"name": "list_devices",
|
||||
"description": "List all devices connected to Home Assistant",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"domain": {
|
||||
"type": "string",
|
||||
"enum": ["light", "climate", "alarm_control_panel", ...]
|
||||
},
|
||||
"area": { "type": "string" },
|
||||
"floor": { "type": "string" }
|
||||
}
|
||||
}
|
||||
},
|
||||
// ... other tools
|
||||
],
|
||||
"prompts": [],
|
||||
"resources": [
|
||||
{
|
||||
"name": "Home Assistant API",
|
||||
"url": "https://developers.home-assistant.io/docs/api/rest/"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
Note: The `/mcp` endpoint is publicly accessible and does not require authentication, as it only provides schema information.
|
||||
|
||||
## Core Functions
|
||||
|
||||
### State Management
|
||||
```http
|
||||
GET /api/state
|
||||
POST /api/state
|
||||
```
|
||||
|
||||
Manages the current state of the system.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/state
|
||||
{
|
||||
"context": "living_room",
|
||||
"state": {
|
||||
"lights": "on",
|
||||
"temperature": 22
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Context Updates
|
||||
```http
|
||||
POST /api/context
|
||||
```
|
||||
|
||||
Updates the current context with new information.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/context
|
||||
{
|
||||
"user": "john",
|
||||
"location": "kitchen",
|
||||
"time": "morning",
|
||||
"activity": "cooking"
|
||||
}
|
||||
```
|
||||
|
||||
## Action Endpoints
|
||||
|
||||
### Execute Action
|
||||
```http
|
||||
POST /api/action
|
||||
```
|
||||
|
||||
Executes a specified action with given parameters.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/action
|
||||
{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room",
|
||||
"brightness": 80
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Batch Actions
|
||||
```http
|
||||
POST /api/actions/batch
|
||||
```
|
||||
|
||||
Executes multiple actions in sequence.
|
||||
|
||||
**Example Request:**
|
||||
```json
|
||||
POST /api/actions/batch
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "set_temperature",
|
||||
"parameters": {
|
||||
"temperature": 22
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
## Query Functions
|
||||
|
||||
### Get Available Actions
|
||||
```http
|
||||
GET /api/actions
|
||||
```
|
||||
|
||||
Returns a list of all available actions.
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"actions": [
|
||||
{
|
||||
"name": "turn_on_lights",
|
||||
"parameters": ["room", "brightness"],
|
||||
"description": "Turns on lights in specified room"
|
||||
},
|
||||
{
|
||||
"name": "set_temperature",
|
||||
"parameters": ["temperature"],
|
||||
"description": "Sets temperature in current context"
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Context Query
|
||||
```http
|
||||
GET /api/context?type=current
|
||||
```
|
||||
|
||||
Retrieves context information.
|
||||
|
||||
**Example Response:**
|
||||
```json
|
||||
{
|
||||
"current_context": {
|
||||
"user": "john",
|
||||
"location": "kitchen",
|
||||
"time": "morning",
|
||||
"activity": "cooking"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## WebSocket Events
|
||||
|
||||
The server supports real-time updates via WebSocket connections.
|
||||
|
||||
```javascript
|
||||
// Client-side connection example
|
||||
const ws = new WebSocket('ws://localhost:3000/ws');
|
||||
|
||||
ws.onmessage = (event) => {
|
||||
const data = JSON.parse(event.data);
|
||||
console.log('Received update:', data);
|
||||
};
|
||||
```
|
||||
|
||||
### Supported Events
|
||||
|
||||
- `state_change`: Emitted when system state changes
|
||||
- `context_update`: Emitted when context is updated
|
||||
- `action_executed`: Emitted when an action is completed
|
||||
- `error`: Emitted when an error occurs
|
||||
|
||||
**Example Event Data:**
|
||||
```json
|
||||
{
|
||||
"event": "state_change",
|
||||
"data": {
|
||||
"previous_state": {
|
||||
"lights": "off"
|
||||
},
|
||||
"current_state": {
|
||||
"lights": "on"
|
||||
},
|
||||
"timestamp": "2024-03-20T10:30:00Z"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
All endpoints return standard HTTP status codes:
|
||||
|
||||
- 200: Success
|
||||
- 400: Bad Request
|
||||
- 401: Unauthorized
|
||||
- 403: Forbidden
|
||||
- 404: Not Found
|
||||
- 500: Internal Server Error
|
||||
|
||||
**Error Response Format:**
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "INVALID_PARAMETERS",
|
||||
"message": "Missing required parameter: room",
|
||||
"details": {
|
||||
"missing_fields": ["room"]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Rate Limiting
|
||||
|
||||
The API implements rate limiting to prevent abuse:
|
||||
|
||||
- 100 requests per minute per IP for regular endpoints
|
||||
- 1000 requests per minute per IP for WebSocket connections
|
||||
|
||||
When rate limit is exceeded, the server returns:
|
||||
|
||||
```json
|
||||
{
|
||||
"error": {
|
||||
"code": "RATE_LIMIT_EXCEEDED",
|
||||
"message": "Too many requests",
|
||||
"reset_time": "2024-03-20T10:31:00Z"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Example Usage
|
||||
|
||||
### Using curl
|
||||
```bash
|
||||
# Get current state
|
||||
curl -X GET \
|
||||
http://localhost:3000/api/state \
|
||||
-H 'Authorization: ApiKey your_api_key_here'
|
||||
|
||||
# Execute action
|
||||
curl -X POST \
|
||||
http://localhost:3000/api/action \
|
||||
-H 'Authorization: ApiKey your_api_key_here' \
|
||||
-H 'Content-Type: application/json' \
|
||||
-d '{
|
||||
"action": "turn_on_lights",
|
||||
"parameters": {
|
||||
"room": "living_room",
|
||||
"brightness": 80
|
||||
}
|
||||
}'
|
||||
```
|
||||
|
||||
### Using JavaScript
|
||||
```javascript
|
||||
// Execute action
|
||||
async function executeAction() {
|
||||
const response = await fetch('http://localhost:3000/api/action', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': 'ApiKey your_api_key_here',
|
||||
'Content-Type': 'application/json'
|
||||
},
|
||||
body: JSON.stringify({
|
||||
action: 'turn_on_lights',
|
||||
parameters: {
|
||||
room: 'living_room',
|
||||
brightness: 80
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Action result:', data);
|
||||
}
|
||||
```
|
||||
|
||||
## Security Middleware
|
||||
|
||||
### Overview
|
||||
|
||||
The security middleware provides a comprehensive set of utility functions to enhance the security of the Home Assistant MCP application. These functions cover various aspects of web security, including:
|
||||
|
||||
- Rate limiting
|
||||
- Request validation
|
||||
- Input sanitization
|
||||
- Security headers
|
||||
- Error handling
|
||||
|
||||
### Utility Functions
|
||||
|
||||
#### `checkRateLimit(ip: string, maxRequests?: number, windowMs?: number)`
|
||||
|
||||
Manages rate limiting for IP addresses to prevent abuse.
|
||||
|
||||
**Parameters**:
|
||||
- `ip`: IP address to track
|
||||
- `maxRequests`: Maximum number of requests allowed (default: 100)
|
||||
- `windowMs`: Time window for rate limiting (default: 15 minutes)
|
||||
|
||||
**Returns**: `boolean` or throws an error if limit is exceeded
|
||||
|
||||
**Example**:
|
||||
```typescript
|
||||
try {
|
||||
checkRateLimit('127.0.0.1'); // Checks rate limit with default settings
|
||||
} catch (error) {
|
||||
// Handle rate limit exceeded
|
||||
}
|
||||
```
|
||||
|
||||
#### `validateRequestHeaders(request: Request, requiredContentType?: string)`
|
||||
|
||||
Validates incoming HTTP request headers for security and compliance.
|
||||
|
||||
**Parameters**:
|
||||
- `request`: The incoming HTTP request
|
||||
- `requiredContentType`: Expected content type (default: 'application/json')
|
||||
|
||||
**Checks**:
|
||||
- Content type
|
||||
- Request body size
|
||||
- Authorization header (optional)
|
||||
|
||||
**Example**:
|
||||
```typescript
|
||||
try {
|
||||
validateRequestHeaders(request);
|
||||
} catch (error) {
|
||||
// Handle validation errors
|
||||
}
|
||||
```
|
||||
|
||||
#### `sanitizeValue(value: unknown)`
|
||||
|
||||
Sanitizes input values to prevent XSS attacks.
|
||||
|
||||
**Features**:
|
||||
- Escapes HTML tags
|
||||
- Handles nested objects and arrays
|
||||
- Preserves non-string values
|
||||
|
||||
**Example**:
|
||||
```typescript
|
||||
const sanitized = sanitizeValue('<script>alert("xss")</script>');
|
||||
// Returns: '&lt;script&gt;alert("xss")&lt;/script&gt;'
|
||||
```
|
||||
|
||||
#### `applySecurityHeaders(request: Request, helmetConfig?: HelmetOptions)`
|
||||
|
||||
Applies security headers to HTTP requests using Helmet.
|
||||
|
||||
**Security Headers**:
|
||||
- Content Security Policy
|
||||
- X-Frame-Options
|
||||
- X-Content-Type-Options
|
||||
- Referrer Policy
|
||||
- HSTS (in production)
|
||||
|
||||
**Example**:
|
||||
```typescript
|
||||
const headers = applySecurityHeaders(request);
|
||||
```
|
||||
|
||||
#### `handleError(error: Error, env?: string)`
|
||||
|
||||
Handles error responses with environment-specific details.
|
||||
|
||||
**Modes**:
|
||||
- Production: Generic error message
|
||||
- Development: Detailed error with stack trace
|
||||
|
||||
**Example**:
|
||||
```typescript
|
||||
const errorResponse = handleError(error, process.env.NODE_ENV);
|
||||
```
|
||||
|
||||
### Middleware Usage
|
||||
|
||||
These utility functions are integrated into Elysia middleware:
|
||||
|
||||
```typescript
|
||||
const app = new Elysia()
|
||||
.use(rateLimiter) // Rate limiting
|
||||
.use(validateRequest) // Request validation
|
||||
.use(sanitizeInput) // Input sanitization
|
||||
.use(securityHeaders) // Security headers
|
||||
.use(errorHandler) // Error handling
|
||||
```
|
||||
|
||||
### Best Practices
|
||||
|
||||
1. Always validate and sanitize user inputs
|
||||
2. Use rate limiting to prevent abuse
|
||||
3. Apply security headers
|
||||
4. Handle errors gracefully
|
||||
5. Keep environment-specific error handling
|
||||
|
||||
### Security Considerations
|
||||
|
||||
- Configurable rate limits
|
||||
- XSS protection
|
||||
- Content security policies
|
||||
- Token validation
|
||||
- Error information exposure control
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
- Ensure `JWT_SECRET` is set in environment
|
||||
- Check content type in requests
|
||||
- Monitor rate limit errors
|
||||
- Review error handling in different environments
|
||||
68
docs/architecture.md
Normal file
68
docs/architecture.md
Normal file
@@ -0,0 +1,68 @@
|
||||
# Architecture Documentation for MCP Server
|
||||
|
||||
## Overview
|
||||
|
||||
The MCP Server is designed as a high-performance, secure, and scalable bridge between Home Assistant and Language Learning Models (LLMs). This document outlines the architectural design principles, core components, and deployment strategies that power the MCP Server.
|
||||
|
||||
## Key Architectural Components
|
||||
|
||||
### High-Performance Runtime with Bun
|
||||
|
||||
- **Fast Startup & Efficiency:** Powered by Bun, the MCP Server benefits from rapid startup times, efficient memory utilization, and native TypeScript support.
|
||||
- **Optimized Build Process:** Bun's build tools allow for quick iteration and deployment, ensuring minimal downtime and swift performance enhancement.
|
||||
|
||||
### Real-time Communication using Server-Sent Events (SSE)
|
||||
|
||||
- **Continuous Updates:** The server leverages SSE to deliver real-time notifications and updates, ensuring that any changes in Home Assistant are immediately communicated to connected clients.
|
||||
- **Scalable Connection Handling:** SSE provides an event-driven model that efficiently manages multiple simultaneous client connections.
|
||||
|
||||
### Modular & Extensible Design
|
||||
|
||||
- **Plugin Architecture:** Designed with modularity in mind, the MCP Server supports plugins, add-ons, and custom automation scripts, enabling seamless feature expansion without disrupting core functionality.
|
||||
- **Separation of Concerns:** Different components, such as device management, automation control, and system monitoring, are clearly separated, allowing independent development, testing, and scaling.
|
||||
|
||||
### Secure API Integration
|
||||
|
||||
- **Token-Based Authentication:** Robust token-based authentication mechanisms restrict access to authorized users and systems.
|
||||
- **Rate Limiting & Error Handling:** Integrated rate limiting combined with comprehensive error handling ensures system stability and prevents misuse.
|
||||
- **Best Practices:** All API endpoints follow industry-standard security guidelines to protect data and maintain system integrity.
|
||||
|
||||
### Deployment & Scalability
|
||||
|
||||
- **Containerized Deployment with Docker:** The use of Docker Compose enables straightforward deployment, management, and scaling of the server and its dependencies.
|
||||
- **Flexible Environment Configuration:** Environment variables and configuration files (.env) facilitate smooth transitions between development, testing, and production setups.
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- **Advanced Automation Logic:** Integration of more complex automation rules and conditional decision-making capabilities.
|
||||
- **Enhanced Security Measures:** Additional layers of security, such as multi-factor authentication and improved encryption techniques, are on the roadmap.
|
||||
- **Improved Monitoring & Analytics:** Future updates will introduce advanced performance metrics and real-time analytics to monitor system health and user interactions.
|
||||
|
||||
## Conclusion
|
||||
|
||||
The architecture of the MCP Server prioritizes performance, scalability, and security. By leveraging Bun's high-performance runtime, employing real-time communication through SSE, and maintaining a modular, secure design, the MCP Server provides a robust platform for integrating Home Assistant with modern LLM functionalities.
|
||||
|
||||
*This document is a living document and will be updated as the system evolves.*
|
||||
|
||||
## Key Components
|
||||
|
||||
- **API Module:** Handles RESTful endpoints, authentication, and error management.
|
||||
- **SSE Module:** Provides real-time updates through Server-Sent Events.
|
||||
- **Tools Module:** Offers various utilities for device control, automation, and data processing.
|
||||
- **Security Module:** Implements token-based authentication and secure communications.
|
||||
- **Integration Module:** Bridges data between Home Assistant and external systems.
|
||||
|
||||
## Data Flow
|
||||
|
||||
1. Requests enter via the API endpoints.
|
||||
2. Security middleware validates and processes requests.
|
||||
3. Core modules process data and execute the necessary business logic.
|
||||
4. Real-time notifications are managed by the SSE module.
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- Expand modularity with potential microservices.
|
||||
- Enhance security with multi-factor authentication.
|
||||
- Improve scalability through distributed architectures.
|
||||
|
||||
*Further diagrams and detailed breakdowns will be added in future updates.*
|
||||
54
docs/assets/css/style.scss
Normal file
54
docs/assets/css/style.scss
Normal file
@@ -0,0 +1,54 @@
|
||||
@import "{{ site.theme }}";
|
||||
|
||||
// Custom styles
|
||||
.main-nav {
|
||||
margin-top: 20px;
|
||||
|
||||
ul {
|
||||
list-style: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
li {
|
||||
margin-bottom: 8px;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #267CB9;
|
||||
text-decoration: none;
|
||||
|
||||
&:hover {
|
||||
text-decoration: underline;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
h1,
|
||||
h2,
|
||||
h3 {
|
||||
color: #333;
|
||||
}
|
||||
|
||||
code {
|
||||
background-color: #f8f8f8;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 3px;
|
||||
padding: 2px 5px;
|
||||
}
|
||||
|
||||
pre {
|
||||
background-color: #f8f8f8;
|
||||
border: 1px solid #ddd;
|
||||
border-radius: 3px;
|
||||
padding: 10px;
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.wrapper {
|
||||
max-width: 960px;
|
||||
}
|
||||
|
||||
section {
|
||||
max-width: 700px;
|
||||
}
|
||||
36
docs/contributing.md
Normal file
36
docs/contributing.md
Normal file
@@ -0,0 +1,36 @@
|
||||
# Contributing to Home Assistant MCP
|
||||
|
||||
We welcome community contributions to improve the MCP Server. Please review the following guidelines before contributing.
|
||||
|
||||
## How to Contribute
|
||||
|
||||
1. **Fork the Repository:** Create your personal fork on GitHub.
|
||||
2. **Create a Feature Branch:** Use a clear name (e.g., `feature/your-feature` or `bugfix/short-description`).
|
||||
3. **Make Changes:** Develop your feature or fix bugs while following our coding standards.
|
||||
4. **Write Tests:** Include tests for new features or bug fixes.
|
||||
5. **Submit a Pull Request:** Once your changes are complete, submit a PR for review.
|
||||
6. **Address Feedback:** Revise your PR based on maintainers' suggestions.
|
||||
|
||||
## Code Style Guidelines
|
||||
|
||||
- Follow the project's established coding style.
|
||||
- Use Bun tooling for linting and formatting:
|
||||
- `bun run lint`
|
||||
- `bun run format`
|
||||
|
||||
## Documentation
|
||||
|
||||
- Update documentation alongside your code changes.
|
||||
- Ensure tests pass and coverage remains high.
|
||||
|
||||
## Reporting Issues
|
||||
|
||||
- Use the GitHub Issues page to report bugs, request new features, or ask questions.
|
||||
- Provide clear descriptions, replication steps, and any error logs.
|
||||
|
||||
## Community
|
||||
|
||||
- Join our real-time discussions on our chat platforms (Discord, Slack, etc.).
|
||||
- Engage with other contributors to exchange ideas and solutions.
|
||||
|
||||
Thank you for helping improve the Home Assistant MCP project!
|
||||
@@ -7,6 +7,8 @@ This guide provides information for developers who want to contribute to or exte
|
||||
```
|
||||
homeassistant-mcp/
|
||||
├── src/
|
||||
│ ├── __tests__/ # Test files
|
||||
│ ├── __mocks__/ # Mock files
|
||||
│ ├── api/ # API endpoints and route handlers
|
||||
│ ├── config/ # Configuration management
|
||||
│ ├── hass/ # Home Assistant integration
|
||||
@@ -1,122 +1,30 @@
|
||||
# Getting Started with Home Assistant MCP
|
||||
# Getting Started
|
||||
|
||||
This guide will help you get started with the Home Assistant MCP (Master Control Program).
|
||||
Begin your journey with the Home Assistant MCP Server by following these steps:
|
||||
|
||||
## Prerequisites
|
||||
|
||||
Before you begin, ensure you have:
|
||||
|
||||
1. Node.js (v16 or higher)
|
||||
2. A running Home Assistant instance
|
||||
3. A Home Assistant Long-Lived Access Token
|
||||
|
||||
## Installation
|
||||
|
||||
1. Clone the repository:
|
||||
```bash
|
||||
git clone https://github.com/yourusername/homeassistant-mcp.git
|
||||
cd homeassistant-mcp
|
||||
```
|
||||
|
||||
2. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
3. Copy the example environment file:
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
|
||||
4. Edit the `.env` file with your configuration:
|
||||
```env
|
||||
# Server Configuration
|
||||
PORT=3000
|
||||
NODE_ENV=development
|
||||
|
||||
# Home Assistant Configuration
|
||||
HASS_HOST=http://your-hass-instance:8123
|
||||
HASS_TOKEN=your-long-lived-access-token
|
||||
|
||||
# Security Configuration
|
||||
JWT_SECRET=your-secret-key
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `PORT`: The port number for the MCP server (default: 3000)
|
||||
- `NODE_ENV`: The environment mode (development, production, test)
|
||||
- `HASS_HOST`: Your Home Assistant instance URL
|
||||
- `HASS_TOKEN`: Your Home Assistant Long-Lived Access Token
|
||||
- `JWT_SECRET`: Secret key for JWT token generation
|
||||
|
||||
### Development Mode
|
||||
|
||||
For development, you can use:
|
||||
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
This will start the server in development mode with hot reloading.
|
||||
|
||||
### Production Mode
|
||||
|
||||
For production, build and start the server:
|
||||
|
||||
```bash
|
||||
npm run build
|
||||
npm start
|
||||
```
|
||||
|
||||
## First Steps
|
||||
|
||||
1. Check the server is running:
|
||||
```bash
|
||||
curl http://localhost:3000/api/health
|
||||
```
|
||||
|
||||
2. List available devices:
|
||||
```bash
|
||||
curl -H "Authorization: Bearer your-token" http://localhost:3000/api/tools/devices
|
||||
```
|
||||
|
||||
3. Subscribe to events:
|
||||
```bash
|
||||
curl -H "Authorization: Bearer your-token" http://localhost:3000/api/sse/subscribe?events=state_changed
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
|
||||
- Read the [API Documentation](./API.md) for available endpoints
|
||||
- Learn about [Server-Sent Events](./SSE_API.md) for real-time updates
|
||||
- Explore available [Tools](./tools/README.md) for device control
|
||||
- Check the [Configuration Guide](./configuration/README.md) for advanced settings
|
||||
- **API Documentation:** Read the [API Documentation](api.md) for available endpoints.
|
||||
- **Real-Time Updates:** Learn about [Server-Sent Events](sse-api.md) for live communication.
|
||||
- **Tools:** Explore available [Tools](tools/tools.md) for device control and automation.
|
||||
- **Configuration:** Refer to the [Configuration Guide](configuration.md) for setup and advanced settings.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you encounter issues:
|
||||
|
||||
1. Verify your Home Assistant instance is accessible
|
||||
2. Check your environment variables are correctly set
|
||||
3. Look for errors in the server logs
|
||||
4. Consult the [Troubleshooting Guide](./troubleshooting.md)
|
||||
If you encounter any issues:
|
||||
1. Verify that your Home Assistant instance is accessible.
|
||||
2. Ensure that all required environment variables are properly set.
|
||||
3. Consult the [Troubleshooting Guide](troubleshooting.md) for additional solutions.
|
||||
|
||||
## Development
|
||||
|
||||
For development and contributing:
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Follow the [Development Guide](./development/README.md)
|
||||
4. Submit a pull request
|
||||
For contributors:
|
||||
1. Fork the repository.
|
||||
2. Create a feature branch.
|
||||
3. Follow the [Development Guide](development/development.md) for contribution guidelines.
|
||||
4. Submit a pull request with your enhancements.
|
||||
|
||||
## Support
|
||||
|
||||
Need help? Check out:
|
||||
|
||||
- [GitHub Issues](https://github.com/yourusername/homeassistant-mcp/issues)
|
||||
- [Troubleshooting Guide](./troubleshooting.md)
|
||||
- [FAQ](./troubleshooting.md#faq)
|
||||
Need help?
|
||||
- Visit our [GitHub Issues](https://github.com/jango-blockchained/homeassistant-mcp/issues).
|
||||
- Review the [Troubleshooting Guide](troubleshooting.md).
|
||||
- Check the [FAQ](troubleshooting.md#faq) for common questions.
|
||||
5
docs/getting-started/configuration.md
Normal file
5
docs/getting-started/configuration.md
Normal file
@@ -0,0 +1,5 @@
|
||||
# Configuration
|
||||
|
||||
## Basic Configuration
|
||||
|
||||
## Advanced Settings
|
||||
124
docs/getting-started/installation.md
Normal file
124
docs/getting-started/installation.md
Normal file
@@ -0,0 +1,124 @@
|
||||
# Installation Guide
|
||||
|
||||
## Prerequisites
|
||||
|
||||
### System Requirements
|
||||
- **Operating System:** Linux, macOS, or Windows (Docker recommended)
|
||||
- **Runtime:** Bun v1.0.26 or higher
|
||||
- **Home Assistant:** v2023.11 or higher
|
||||
- **Minimum Hardware:**
|
||||
- 2 CPU cores
|
||||
- 2GB RAM
|
||||
- 10GB free disk space
|
||||
|
||||
### Software Dependencies
|
||||
- Bun runtime
|
||||
- Docker (optional, recommended for deployment)
|
||||
- Git
|
||||
- Node.js (for some development tasks)
|
||||
|
||||
## Installation Methods
|
||||
|
||||
### 1. Basic Setup
|
||||
|
||||
#### Install Bun
|
||||
```bash
|
||||
curl -fsSL https://bun.sh/install | bash
|
||||
```
|
||||
|
||||
#### Clone Repository
|
||||
```bash
|
||||
git clone https://github.com/jango-blockchained/homeassistant-mcp.git
|
||||
cd homeassistant-mcp
|
||||
```
|
||||
|
||||
#### Install Dependencies
|
||||
```bash
|
||||
bun install
|
||||
```
|
||||
|
||||
#### Configure Environment
|
||||
1. Copy environment template
|
||||
```bash
|
||||
cp .env.example .env
|
||||
```
|
||||
2. Edit `.env` file with your Home Assistant configuration
|
||||
- Set `HASS_HOST`
|
||||
- Configure authentication tokens
|
||||
- Adjust other settings as needed
|
||||
|
||||
#### Build and Start
|
||||
```bash
|
||||
bun run build
|
||||
bun start
|
||||
```
|
||||
|
||||
### 2. Docker Setup (Recommended)
|
||||
|
||||
#### Prerequisites
|
||||
- Docker
|
||||
- Docker Compose
|
||||
|
||||
#### Deployment Steps
|
||||
```bash
|
||||
# Clone repository
|
||||
git clone https://github.com/jango-blockchained/homeassistant-mcp.git
|
||||
cd homeassistant-mcp
|
||||
|
||||
# Configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env file with your settings
|
||||
|
||||
# Deploy with Docker Compose
|
||||
docker compose up -d
|
||||
```
|
||||
|
||||
### 3. Home Assistant Add-on (Coming Soon)
|
||||
We're working on a direct Home Assistant add-on for even easier installation.
|
||||
|
||||
## Verification
|
||||
|
||||
### Check Installation
|
||||
- Web Interface: [http://localhost:3000](http://localhost:3000)
|
||||
- Logs: `docker compose logs` or check `logs/` directory
|
||||
|
||||
### Troubleshooting
|
||||
- Ensure all environment variables are correctly set
|
||||
- Check network connectivity to Home Assistant
|
||||
- Verify authentication tokens
|
||||
|
||||
## Updating
|
||||
|
||||
### Basic Setup
|
||||
```bash
|
||||
git pull
|
||||
bun install
|
||||
bun run build
|
||||
bun start
|
||||
```
|
||||
|
||||
### Docker
|
||||
```bash
|
||||
git pull
|
||||
docker compose up -d --build
|
||||
```
|
||||
|
||||
## Uninstallation
|
||||
|
||||
### Basic Setup
|
||||
```bash
|
||||
cd homeassistant-mcp
|
||||
bun stop # Stop the application
|
||||
rm -rf node_modules dist
|
||||
```
|
||||
|
||||
### Docker
|
||||
```bash
|
||||
docker compose down
|
||||
docker rmi homeassistant-mcp # Remove image
|
||||
```
|
||||
|
||||
## Next Steps
|
||||
- [Configuration Guide](configuration.md)
|
||||
- [Usage Instructions](../usage.md)
|
||||
- [Troubleshooting](../troubleshooting.md)
|
||||
110
docs/index.md
Normal file
110
docs/index.md
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
layout: default
|
||||
title: Home
|
||||
nav_order: 1
|
||||
---
|
||||
|
||||
# 📚 Home Assistant MCP Documentation
|
||||
|
||||
Welcome to the documentation for the Home Assistant MCP (Model Context Protocol) Server.
|
||||
|
||||
## 📑 Documentation Index
|
||||
|
||||
- [Getting Started Guide](getting-started.md)
|
||||
- [API Documentation](api.md)
|
||||
- [Troubleshooting](troubleshooting.md)
|
||||
- [Contributing Guide](contributing.md)
|
||||
|
||||
For project overview, installation, and general information, please see our [main README](../README.md).
|
||||
|
||||
## 🔗 Quick Links
|
||||
|
||||
- [GitHub Repository](https://github.com/jango-blockchained/homeassistant-mcp)
|
||||
- [Issue Tracker](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||
- [GitHub Discussions](https://github.com/jango-blockchained/homeassistant-mcp/discussions)
|
||||
|
||||
## 📝 License
|
||||
|
||||
This project is licensed under the MIT License. See [LICENSE](../LICENSE) for details.
|
||||
|
||||
# Model Context Protocol (MCP) Server
|
||||
|
||||
## Overview
|
||||
|
||||
The Model Context Protocol (MCP) Server is a cutting-edge bridge between Home Assistant and Language Learning Models (LLMs), designed to revolutionize smart home automation and control. This documentation provides comprehensive information about setting up, configuring, and using the Home Assistant MCP.
|
||||
|
||||
## Key Features
|
||||
|
||||
### 🏠 Smart Home Integration
|
||||
- Natural language control of smart devices
|
||||
- Real-time device state monitoring
|
||||
- Advanced automation capabilities
|
||||
|
||||
### 🤖 LLM Powered Interactions
|
||||
- Intuitive voice and text-based commands
|
||||
- Context-aware device management
|
||||
- Intelligent automation suggestions
|
||||
|
||||
### 🔒 Security & Performance
|
||||
- Token-based authentication
|
||||
- High-performance Bun runtime
|
||||
- Secure, real-time communication protocols
|
||||
|
||||
## Documentation
|
||||
|
||||
### Core Documentation
|
||||
1. [Getting Started](getting-started.md)
|
||||
- Installation and basic setup
|
||||
- Configuration
|
||||
- First Steps
|
||||
|
||||
2. [API Reference](api.md)
|
||||
- REST API Endpoints
|
||||
- Authentication
|
||||
- Error Handling
|
||||
|
||||
3. [SSE API](sse-api.md)
|
||||
- Event Subscriptions
|
||||
- Real-time Updates
|
||||
- Connection Management
|
||||
|
||||
### Advanced Topics
|
||||
4. [Architecture](architecture.md)
|
||||
- System Design
|
||||
- Components
|
||||
- Data Flow
|
||||
|
||||
5. [Configuration](getting-started.md#configuration)
|
||||
- Environment Variables
|
||||
- Security Settings
|
||||
- Performance Tuning
|
||||
|
||||
6. [Development Guide](development/development.md)
|
||||
- Project Structure
|
||||
- Contributing Guidelines
|
||||
- Testing
|
||||
|
||||
7. [Troubleshooting](troubleshooting.md)
|
||||
- Common Issues
|
||||
- Debugging
|
||||
- FAQ
|
||||
|
||||
## Quick Links
|
||||
|
||||
- [GitHub Repository](https://github.com/jango-blockchained/homeassistant-mcp)
|
||||
- [Issue Tracker](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||
- [Contributing Guide](contributing.md)
|
||||
- [Roadmap](roadmap.md)
|
||||
|
||||
## Community and Support
|
||||
|
||||
If you need help or have questions:
|
||||
|
||||
1. Check the [Troubleshooting Guide](troubleshooting.md)
|
||||
2. Search existing [Issues](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||
3. Join our [GitHub Discussions](https://github.com/jango-blockchained/homeassistant-mcp/discussions)
|
||||
4. Create a new issue if your problem isn't already reported
|
||||
|
||||
## License
|
||||
|
||||
This project is licensed under the MIT License. See [LICENSE](https://github.com/jango-blockchained/homeassistant-mcp/blob/main/LICENSE) for details.
|
||||
51
docs/roadmap.md
Normal file
51
docs/roadmap.md
Normal file
@@ -0,0 +1,51 @@
|
||||
# Roadmap for MCP Server
|
||||
|
||||
The following roadmap outlines our planned enhancements and future directions for the Home Assistant MCP Server. This document is a living guide that will be updated as new features are planned and developed.
|
||||
|
||||
## Near-Term Goals
|
||||
|
||||
- **Advanced Automation Capabilities:**
|
||||
- Integrate sophisticated automation rules with conditional logic and multi-step execution.
|
||||
- Introduce a visual automation builder for simplified rule creation.
|
||||
|
||||
- **Enhanced Security Features:**
|
||||
- Implement multi-factor authentication for critical actions.
|
||||
- Strengthen encryption methods and data handling practices.
|
||||
- Expand monitoring and alerting for potential security breaches.
|
||||
|
||||
- **Performance Optimizations:**
|
||||
- Refine resource utilization to reduce latency.
|
||||
- Optimize real-time data streaming via SSE.
|
||||
- Introduce advanced caching mechanisms for frequently requested data.
|
||||
|
||||
## Mid-Term Goals
|
||||
|
||||
- **User Interface Improvements:**
|
||||
- Develop an intuitive web-based dashboard for device management and monitoring.
|
||||
- Provide real-time analytics and performance metrics.
|
||||
|
||||
- **Expanded Integrations:**
|
||||
- Support a broader range of smart home devices and brands.
|
||||
- Integrate with additional home automation platforms and third-party services.
|
||||
|
||||
- **Developer Experience Enhancements:**
|
||||
- Improve documentation and developer tooling.
|
||||
- Streamline contribution guidelines and testing setups.
|
||||
|
||||
## Long-Term Vision
|
||||
|
||||
- **Ecosystem Expansion:**
|
||||
- Build a modular plugin system for community-driven extensions and integrations.
|
||||
- Enable seamless integration with future technologies in smart home and AI domains.
|
||||
|
||||
- **Scalability and Resilience:**
|
||||
- Architect the system to support large-scale deployments.
|
||||
- Incorporate advanced load balancing and failover mechanisms.
|
||||
|
||||
## How to Follow the Roadmap
|
||||
|
||||
- **Community Involvement:** We welcome and encourage feedback.
|
||||
- **Regular Updates:** This document is updated regularly with new goals and milestones.
|
||||
- **Transparency:** Check our GitHub repository and issue tracker for ongoing discussions.
|
||||
|
||||
*This roadmap is intended as a guide and may evolve based on community needs, technological advancements, and strategic priorities.*
|
||||
422
docs/testing.md
Normal file
422
docs/testing.md
Normal file
@@ -0,0 +1,422 @@
|
||||
# Testing Documentation
|
||||
|
||||
## Quick Reference
|
||||
|
||||
```bash
|
||||
# Most Common Commands
|
||||
bun test # Run all tests
|
||||
bun test --watch # Run tests in watch mode
|
||||
bun test --coverage # Run tests with coverage
|
||||
bun test path/to/test.ts # Run a specific test file
|
||||
|
||||
# Additional Options
|
||||
DEBUG=true bun test # Run with debug output
|
||||
bun test -t "auth"        # Run tests whose names match a pattern
|
||||
bun test --timeout 60000 # Run with a custom timeout
|
||||
```
|
||||
|
||||
## Overview
|
||||
|
||||
This document describes the testing setup and practices used in the Home Assistant MCP project. We use Bun's test runner for both unit and integration testing, ensuring comprehensive coverage across modules.
|
||||
|
||||
## Test Structure
|
||||
|
||||
Tests are organized in two main locations:
|
||||
|
||||
1. **Root Level Integration Tests** (`/__tests__/`):
|
||||
|
||||
```
|
||||
__tests__/
|
||||
├── ai/ # AI/ML component tests
|
||||
├── api/ # API integration tests
|
||||
├── context/ # Context management tests
|
||||
├── hass/ # Home Assistant integration tests
|
||||
├── schemas/ # Schema validation tests
|
||||
├── security/ # Security integration tests
|
||||
├── tools/ # Tools and utilities tests
|
||||
├── websocket/ # WebSocket integration tests
|
||||
├── helpers.test.ts # Helper function tests
|
||||
├── index.test.ts # Main application tests
|
||||
└── server.test.ts # Server integration tests
|
||||
```
|
||||
|
||||
2. **Component Level Unit Tests** (`src/**/`):
|
||||
|
||||
```
|
||||
src/
|
||||
├── __tests__/ # Global test setup and utilities
|
||||
│ └── setup.ts # Global test configuration
|
||||
├── component/
|
||||
│ ├── __tests__/ # Component-specific unit tests
|
||||
│ └── component.ts
|
||||
```
|
||||
|
||||
## Test Configuration
|
||||
|
||||
### Bun Test Configuration (`bunfig.toml`)
|
||||
|
||||
```toml
|
||||
[test]
|
||||
preload = ["./src/__tests__/setup.ts"] # Global test setup
|
||||
coverage = true # Enable coverage by default
|
||||
timeout = 30000 # Test timeout in milliseconds
|
||||
testMatch = ["**/__tests__/**/*.test.ts"] # Test file patterns
|
||||
```
|
||||
|
||||
### Bun Scripts
|
||||
|
||||
Available test commands in `package.json`:
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
bun test
|
||||
|
||||
# Watch mode for development
|
||||
bun test --watch
|
||||
|
||||
# Generate coverage report
|
||||
bun test --coverage
|
||||
|
||||
# Run linting
|
||||
bun run lint
|
||||
|
||||
# Format code
|
||||
bun run format
|
||||
```
|
||||
|
||||
## Test Setup
|
||||
|
||||
### Global Configuration
|
||||
|
||||
A global test setup file (`src/__tests__/setup.ts`) provides:
|
||||
- Environment configuration
|
||||
- Mock utilities
|
||||
- Test helper functions
|
||||
- Global lifecycle hooks
|
||||
|
||||
### Test Environment
|
||||
|
||||
- Environment variables are loaded from `.env.test`.
|
||||
- Console output is minimized unless `DEBUG=true`.
|
||||
- JWT secrets and tokens are preconfigured for testing.
|
||||
- Rate limiting and security features are initialized appropriately.
|
||||
|
||||
## Running Tests
|
||||
|
||||
```bash
|
||||
# Basic test run
|
||||
bun test
|
||||
|
||||
# Run tests with coverage
|
||||
bun test --coverage
|
||||
|
||||
# Run a specific test file
|
||||
bun test path/to/test.test.ts
|
||||
|
||||
# Run tests in watch mode
|
||||
bun test --watch
|
||||
|
||||
# Run tests with debug output
|
||||
DEBUG=true bun test
|
||||
|
||||
# Run tests with increased timeout
|
||||
bun test --timeout 60000
|
||||
|
||||
# Run tests whose names match a pattern
bun test -t "auth"
|
||||
```
|
||||
|
||||
## Advanced Debugging
|
||||
|
||||
### Using Node Inspector
|
||||
|
||||
```bash
|
||||
# Start tests with inspector
|
||||
bun test --inspect
|
||||
|
||||
# Start tests with inspector and break on first line
|
||||
bun test --inspect-brk
|
||||
```
|
||||
|
||||
### Using VS Code
|
||||
|
||||
Create a launch configuration in `.vscode/launch.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "launch",
|
||||
"name": "Debug Tests",
|
||||
"program": "${workspaceFolder}/node_modules/bun/bin/bun",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": { "DEBUG": "true" }
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
### Test Isolation
|
||||
|
||||
To run a single test in isolation:
|
||||
|
||||
```typescript
|
||||
describe.only("specific test suite", () => {
|
||||
it.only("specific test case", () => {
|
||||
// Only this test will run
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Writing Tests
|
||||
|
||||
### Test File Naming
|
||||
|
||||
- Place test files in a `__tests__` directory adjacent to the code being tested.
|
||||
- Name files with the pattern `*.test.ts`.
|
||||
- Mirror the structure of the source code in your test organization.
|
||||
|
||||
### Example Test Structure
|
||||
|
||||
```typescript
|
||||
describe("Security Features", () => {
|
||||
it("should validate tokens correctly", () => {
|
||||
const payload = { userId: "123", role: "user" };
|
||||
const token = jwt.sign(payload, validSecret, { expiresIn: "1h" });
|
||||
const result = TokenManager.validateToken(token, testIp);
|
||||
expect(result.valid).toBe(true);
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Coverage
|
||||
|
||||
The project maintains strict coverage:
|
||||
- Overall coverage: at least 80%
|
||||
- Critical paths: 90%+
|
||||
- New features: ≥85% coverage
|
||||
|
||||
Generate a coverage report with:
|
||||
|
||||
```bash
|
||||
bun test --coverage
|
||||
```
|
||||
|
||||
## Security Middleware Testing
|
||||
|
||||
### Utility Function Testing
|
||||
|
||||
The security middleware now uses a utility-first approach, which allows for more granular and comprehensive testing. Each security function is now independently testable, improving code reliability and maintainability.
|
||||
|
||||
#### Key Utility Functions
|
||||
|
||||
1. **Rate Limiting (`checkRateLimit`)**
|
||||
- Tests multiple scenarios:
|
||||
- Requests under threshold
|
||||
- Requests exceeding threshold
|
||||
- Rate limit reset after window expiration
|
||||
|
||||
```typescript
|
||||
// Example test
|
||||
it('should throw when requests exceed threshold', () => {
|
||||
const ip = '127.0.0.2';
|
||||
for (let i = 0; i < 11; i++) {
|
||||
if (i < 10) {
|
||||
expect(() => checkRateLimit(ip, 10)).not.toThrow();
|
||||
} else {
|
||||
expect(() => checkRateLimit(ip, 10)).toThrow('Too many requests from this IP');
|
||||
}
|
||||
}
|
||||
});
|
||||
```
|
||||
|
||||
2. **Request Validation (`validateRequestHeaders`)**
|
||||
- Tests content type validation
|
||||
- Checks request size limits
|
||||
- Validates authorization headers
|
||||
|
||||
```typescript
|
||||
it('should reject invalid content type', () => {
|
||||
const mockRequest = new Request('http://localhost', {
|
||||
method: 'POST',
|
||||
headers: { 'content-type': 'text/plain' }
|
||||
});
|
||||
expect(() => validateRequestHeaders(mockRequest)).toThrow('Content-Type must be application/json');
|
||||
});
|
||||
```
|
||||
|
||||
3. **Input Sanitization (`sanitizeValue`)**
|
||||
- Sanitizes HTML tags
|
||||
- Handles nested objects
|
||||
- Preserves non-string values
|
||||
|
||||
```typescript
|
||||
it('should sanitize HTML tags', () => {
  const input = '<script>alert("xss")</script>Hello';
  const sanitized = sanitizeValue(input);
  // Tags are escaped so the markup is rendered inert
  expect(sanitized).toBe('&lt;script&gt;alert("xss")&lt;/script&gt;Hello');
});
|
||||
```
|
||||
|
||||
4. **Security Headers (`applySecurityHeaders`)**
|
||||
- Verifies correct security header application
|
||||
- Checks CSP, frame options, and other security headers
|
||||
|
||||
```typescript
|
||||
it('should apply security headers', () => {
|
||||
const mockRequest = new Request('http://localhost');
|
||||
const headers = applySecurityHeaders(mockRequest);
|
||||
expect(headers['content-security-policy']).toBeDefined();
|
||||
expect(headers['x-frame-options']).toBeDefined();
|
||||
});
|
||||
```
|
||||
|
||||
5. **Error Handling (`handleError`)**
|
||||
- Tests error responses in production and development modes
|
||||
- Verifies error message and stack trace inclusion
|
||||
|
||||
```typescript
|
||||
it('should include error details in development mode', () => {
|
||||
const error = new Error('Test error');
|
||||
const result = handleError(error, 'development');
|
||||
expect(result).toEqual({
  error: true,
  message: 'Internal server error',
  details: 'Test error',
  stack: expect.any(String)
});
});
|
||||
```
|
||||
|
||||
### Testing Philosophy
|
||||
|
||||
- **Isolation**: Each utility function is tested independently
|
||||
- **Comprehensive Coverage**: Multiple scenarios for each function
|
||||
- **Predictable Behavior**: Clear expectations for input and output
|
||||
- **Error Handling**: Robust testing of error conditions
|
||||
|
||||
### Best Practices
|
||||
|
||||
1. Use minimal, focused test cases
|
||||
2. Test both successful and failure scenarios
|
||||
3. Verify input sanitization and security measures
|
||||
4. Mock external dependencies when necessary
|
||||
|
||||
### Running Security Tests
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
bun test
|
||||
|
||||
# Run specific security tests
|
||||
bun test __tests__/security/
|
||||
```
|
||||
|
||||
### Continuous Improvement
|
||||
|
||||
- Regularly update test cases
|
||||
- Add new test scenarios as security requirements evolve
|
||||
- Perform periodic security audits
|
||||
|
||||
## Best Practices
|
||||
|
||||
1. **Isolation**: Each test should be independent and not rely on the state of other tests.
|
||||
2. **Mocking**: Use the provided mock utilities for external dependencies.
|
||||
3. **Cleanup**: Clean up any resources or state modifications in `afterEach` or `afterAll` hooks.
|
||||
4. **Descriptive Names**: Use clear, descriptive test names that explain the expected behavior.
|
||||
5. **Assertions**: Make specific, meaningful assertions rather than general ones.
|
||||
6. **Setup**: Use `beforeEach` for common test setup to avoid repetition.
|
||||
7. **Error Cases**: Test both success and error cases for complete coverage.
|
||||
|
||||
## Coverage
|
||||
|
||||
The project aims for high test coverage, particularly focusing on:
|
||||
- Security-critical code paths
|
||||
- API endpoints
|
||||
- Data validation
|
||||
- Error handling
|
||||
- Event broadcasting
|
||||
|
||||
Run coverage reports using:
|
||||
```bash
|
||||
bun test --coverage
|
||||
```
|
||||
|
||||
## Debugging Tests
|
||||
|
||||
To debug tests:
|
||||
1. Set `DEBUG=true` to enable console output during tests
|
||||
2. Use the `--watch` flag for development
|
||||
3. Add `console.log()` statements (they're only shown when DEBUG is true)
|
||||
4. Use the test utilities' debugging helpers
|
||||
|
||||
### Advanced Debugging
|
||||
|
||||
1. **Using Node Inspector**:
|
||||
```bash
|
||||
# Start tests with inspector
|
||||
bun test --inspect
|
||||
|
||||
# Start tests with inspector and break on first line
|
||||
bun test --inspect-brk
|
||||
```
|
||||
|
||||
2. **Using VS Code**:
|
||||
```jsonc
|
||||
// .vscode/launch.json
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "bun",
|
||||
"request": "launch",
|
||||
"name": "Debug Tests",
|
||||
"program": "${workspaceFolder}/node_modules/bun/bin/bun",
|
||||
"args": ["test", "${file}"],
|
||||
"cwd": "${workspaceFolder}",
|
||||
"env": { "DEBUG": "true" }
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
3. **Test Isolation**:
|
||||
To run a single test in isolation:
|
||||
```typescript
|
||||
describe.only("specific test suite", () => {
|
||||
it.only("specific test case", () => {
|
||||
// Only this test will run
|
||||
});
|
||||
});
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
When contributing new code:
|
||||
1. Add tests for new features
|
||||
2. Ensure existing tests pass
|
||||
3. Maintain or improve coverage
|
||||
4. Follow the existing test patterns and naming conventions
|
||||
5. Document any new test utilities or patterns
|
||||
|
||||
## Coverage Requirements
|
||||
|
||||
The project maintains strict coverage requirements:
|
||||
|
||||
- Minimum overall coverage: 80%
|
||||
- Critical paths (security, API, data validation): 90%
|
||||
- New features must include tests with >= 85% coverage
|
||||
|
||||
Coverage reports are generated in multiple formats:
|
||||
- Console summary
|
||||
- HTML report (./coverage/index.html)
|
||||
- LCOV report (./coverage/lcov.info)
|
||||
|
||||
To view detailed coverage:
|
||||
```bash
|
||||
# Generate and open coverage report
|
||||
bun test --coverage && open coverage/index.html
|
||||
```
|
||||
@@ -1,144 +1,135 @@
|
||||
# Troubleshooting Guide
|
||||
|
||||
This guide helps you diagnose and fix common issues with the Home Assistant MCP.
|
||||
This guide provides solutions to common issues encountered with the Home Assistant MCP Server.
|
||||
|
||||
## Common Issues
|
||||
|
||||
### Connection Issues
|
||||
- **Server Not Starting:**
|
||||
- Verify that all required environment variables are correctly set.
|
||||
- Check for port conflicts or missing dependencies.
|
||||
- Review the server logs for error details.
|
||||
|
||||
#### Cannot Connect to Home Assistant
|
||||
- **Connection Problems:**
|
||||
- Ensure your Home Assistant instance is reachable.
|
||||
- Confirm that the authentication token is valid.
|
||||
- Check network configurations and firewalls.
|
||||
|
||||
## Tool Issues
|
||||
|
||||
### Tool Not Found
|
||||
|
||||
**Symptoms:**
|
||||
- Connection timeout errors
|
||||
- "Failed to connect to Home Assistant" messages
|
||||
- 401 Unauthorized errors
|
||||
- "Tool not found" errors or 404 responses.
|
||||
|
||||
**Solutions:**
|
||||
1. Verify Home Assistant is running
|
||||
2. Check HASS_HOST environment variable
|
||||
3. Validate HASS_TOKEN is correct
|
||||
4. Ensure network connectivity
|
||||
5. Check firewall settings
|
||||
- Double-check the tool name spelling.
|
||||
- Verify that the tool is correctly registered.
|
||||
- Review tool imports and documentation.
|
||||
|
||||
#### SSE Connection Drops
|
||||
### Tool Execution Failures
|
||||
|
||||
**Symptoms:**
|
||||
- Frequent disconnections
|
||||
- Missing events
|
||||
- Connection reset errors
|
||||
- Execution errors or timeouts.
|
||||
|
||||
**Solutions:**
|
||||
1. Check network stability
|
||||
2. Increase connection timeout
|
||||
3. Implement reconnection logic
|
||||
4. Monitor server resources
|
||||
- Validate input parameters.
|
||||
- Check and review error logs.
|
||||
- Debug the tool implementation.
|
||||
- Ensure proper permissions in Home Assistant.
|
||||
|
||||
### Authentication Issues
|
||||
|
||||
#### Invalid Token
|
||||
|
||||
**Symptoms:**
|
||||
- 401 Unauthorized responses
|
||||
- "Invalid token" messages
|
||||
- Authentication failures
|
||||
|
||||
**Solutions:**
|
||||
1. Generate new Long-Lived Access Token
|
||||
2. Check token expiration
|
||||
3. Verify token format
|
||||
4. Update environment variables
|
||||
|
||||
#### Rate Limiting
|
||||
|
||||
**Symptoms:**
|
||||
- 429 Too Many Requests
|
||||
- "Rate limit exceeded" messages
|
||||
|
||||
**Solutions:**
|
||||
1. Implement request throttling
|
||||
2. Adjust rate limit settings
|
||||
3. Cache responses
|
||||
4. Optimize request patterns
|
||||
|
||||
### Tool Issues
|
||||
|
||||
#### Tool Not Found
|
||||
|
||||
**Symptoms:**
|
||||
- "Tool not found" errors
|
||||
- 404 Not Found responses
|
||||
|
||||
**Solutions:**
|
||||
1. Check tool name spelling
|
||||
2. Verify tool registration
|
||||
3. Update tool imports
|
||||
4. Check tool availability
|
||||
|
||||
#### Tool Execution Fails
|
||||
|
||||
**Symptoms:**
|
||||
- Tool execution errors
|
||||
- Unexpected responses
|
||||
- Timeout issues
|
||||
|
||||
**Solutions:**
|
||||
1. Validate input parameters
|
||||
2. Check error logs
|
||||
3. Debug tool implementation
|
||||
4. Verify Home Assistant permissions
|
||||
|
||||
## Debugging
|
||||
## Debugging Steps
|
||||
|
||||
### Server Logs
|
||||
|
||||
1. Enable debug logging:
|
||||
1. Enable debug logging by setting:
|
||||
```env
|
||||
LOG_LEVEL=debug
|
||||
```
|
||||
|
||||
2. Check logs:
|
||||
```bash
|
||||
npm run logs
|
||||
```
|
||||
|
||||
3. Filter logs:
|
||||
3. Filter errors:
|
||||
```bash
|
||||
npm run logs | grep "error"
|
||||
```
|
||||
|
||||
### Network Debugging
|
||||
|
||||
1. Check API endpoints:
|
||||
1. Test API endpoints:
|
||||
```bash
|
||||
curl -v http://localhost:3000/api/health
|
||||
```
|
||||
|
||||
2. Monitor SSE connections:
|
||||
```bash
|
||||
curl -N http://localhost:3000/api/sse/stats
|
||||
```
|
||||
|
||||
3. Test WebSocket:
|
||||
3. Test WebSocket connectivity:
|
||||
```bash
|
||||
wscat -c ws://localhost:3000
|
||||
```
|
||||
|
||||
### Performance Issues
|
||||
|
||||
1. Monitor memory usage:
|
||||
- Monitor memory usage with:
|
||||
```bash
|
||||
npm run stats
|
||||
```
|
||||
|
||||
2. Check response times:
|
||||
```bash
|
||||
curl -w "%{time_total}\n" -o /dev/null -s http://localhost:3000/api/health
|
||||
```
|
||||
## Security Middleware Troubleshooting
|
||||
|
||||
3. Profile code:
|
||||
```bash
|
||||
npm run profile
|
||||
```
|
||||
### Rate Limiting Problems
|
||||
|
||||
**Symptoms:** Receiving 429 (Too Many Requests) errors.
|
||||
|
||||
**Solutions:**
|
||||
- Adjust and fine-tune rate limit settings.
|
||||
- Consider different limits for critical versus non-critical endpoints.
|
||||
|
||||
### Request Validation Failures
|
||||
|
||||
**Symptoms:** 400 or 415 errors on valid requests.
|
||||
|
||||
**Solutions:**
|
||||
- Verify that the `Content-Type` header is set correctly.
|
||||
- Inspect request payload size and format.
|
||||
|
||||
### Input Sanitization Issues
|
||||
|
||||
**Symptoms:** Unexpected data transformation or loss.
|
||||
|
||||
**Solutions:**
|
||||
- Test sanitization with various input types.
|
||||
- Implement custom sanitization for complex data if needed.
|
||||
|
||||
### Security Header Configuration
|
||||
|
||||
**Symptoms:** Missing or improper security headers.
|
||||
|
||||
**Solutions:**
|
||||
- Review and update security header configurations (e.g., Helmet settings).
|
||||
- Ensure environment-specific header settings are in place.
|
||||
|
||||
### Error Handling and Logging
|
||||
|
||||
**Symptoms:** Inconsistent error responses.
|
||||
|
||||
**Solutions:**
|
||||
- Enhance logging for detailed error tracking.
|
||||
- Adjust error handlers for production and development differences.
|
||||
|
||||
## Additional Resources
|
||||
|
||||
- [OWASP Security Guidelines](https://owasp.org/www-project-top-ten/)
|
||||
- [Helmet.js Documentation](https://helmetjs.github.io/)
|
||||
- [JWT Security Best Practices](https://jwt.io/introduction)
|
||||
|
||||
## Getting Help
|
||||
|
||||
If issues persist:
|
||||
1. Review detailed logs.
|
||||
2. Verify your configuration and environment.
|
||||
3. Consult the GitHub issue tracker or community forums.
|
||||
|
||||
## FAQ
|
||||
|
||||
@@ -167,7 +158,7 @@ A: Adjust SSE_MAX_CLIENTS in configuration or clean up stale connections.
|
||||
1. Documentation
|
||||
- [API Reference](./API.md)
|
||||
- [Configuration Guide](./configuration/README.md)
|
||||
- [Development Guide](./development/README.md)
|
||||
- [Development Guide](./development/development.md)
|
||||
|
||||
2. Community
|
||||
- GitHub Issues
|
||||
@@ -191,3 +182,164 @@ A: Adjust SSE_MAX_CLIENTS in configuration or clean up stale connections.
|
||||
- GitHub Issues
|
||||
- Email Support
|
||||
- Community Forums
|
||||
|
||||
## Security Middleware Troubleshooting
|
||||
|
||||
### Common Issues and Solutions
|
||||
|
||||
#### Rate Limiting Problems
|
||||
|
||||
**Symptom**: Unexpected 429 (Too Many Requests) errors
|
||||
|
||||
**Possible Causes**:
|
||||
- Misconfigured rate limit settings
|
||||
- Shared IP addresses (e.g., behind NAT)
|
||||
- Aggressive client-side retry mechanisms
|
||||
|
||||
**Solutions**:
|
||||
1. Adjust rate limit parameters
|
||||
```typescript
|
||||
// Customize rate limit for specific scenarios
|
||||
checkRateLimit(ip, maxRequests = 200, windowMs = 30 * 60 * 1000)
|
||||
```
|
||||
|
||||
2. Implement more granular rate limiting
|
||||
- Use different limits for different endpoints
|
||||
- Consider user authentication level
|
||||
|
||||
#### Request Validation Failures
|
||||
|
||||
**Symptom**: 400 or 415 status codes on valid requests
|
||||
|
||||
**Possible Causes**:
|
||||
- Incorrect `Content-Type` header
|
||||
- Large request payloads
|
||||
- Malformed authorization headers
|
||||
|
||||
**Debugging Steps**:
|
||||
1. Verify request headers
|
||||
```typescript
|
||||
// Check content type and size
|
||||
validateRequestHeaders(request, 'application/json')
|
||||
```
|
||||
|
||||
2. Log detailed validation errors
|
||||
```typescript
|
||||
try {
|
||||
validateRequestHeaders(request);
|
||||
} catch (error) {
|
||||
console.error('Request validation failed:', error.message);
|
||||
}
|
||||
```
|
||||
|
||||
#### Input Sanitization Issues
|
||||
|
||||
**Symptom**: Unexpected data transformation or loss
|
||||
|
||||
**Possible Causes**:
|
||||
- Complex nested objects
|
||||
- Non-standard input formats
|
||||
- Overly aggressive sanitization
|
||||
|
||||
**Troubleshooting**:
|
||||
1. Test sanitization with various input types
|
||||
```typescript
|
||||
const input = {
|
||||
text: '<script>alert("xss")</script>',
|
||||
nested: { html: '<img src="x" onerror="alert(1)">World' }
|
||||
};
|
||||
const sanitized = sanitizeValue(input);
|
||||
```
|
||||
|
||||
2. Custom sanitization for specific use cases
|
||||
```typescript
|
||||
function customSanitize(value) {
|
||||
// Add custom sanitization logic
|
||||
return sanitizeValue(value);
|
||||
}
|
||||
```
|
||||
|
||||
#### Security Header Configuration
|
||||
|
||||
**Symptom**: Missing or incorrect security headers
|
||||
|
||||
**Possible Causes**:
|
||||
- Misconfigured Helmet options
|
||||
- Environment-specific header requirements
|
||||
|
||||
**Solutions**:
|
||||
1. Custom security header configuration
|
||||
```typescript
|
||||
const customHelmetConfig = {
|
||||
contentSecurityPolicy: {
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", 'trusted-cdn.com']
|
||||
}
|
||||
}
|
||||
};
|
||||
applySecurityHeaders(request, customHelmetConfig);
|
||||
```
|
||||
|
||||
#### Error Handling and Logging
|
||||
|
||||
**Symptom**: Inconsistent error responses
|
||||
|
||||
**Possible Causes**:
|
||||
- Incorrect environment configuration
|
||||
- Unhandled error types
|
||||
|
||||
**Debugging Techniques**:
|
||||
1. Verify environment settings
|
||||
```typescript
|
||||
const errorResponse = handleError(error, process.env.NODE_ENV);
|
||||
```
|
||||
|
||||
2. Add custom error handling
|
||||
```typescript
|
||||
function enhancedErrorHandler(error, env) {
|
||||
// Add custom logging or monitoring
|
||||
console.error('Security error:', error);
|
||||
return handleError(error, env);
|
||||
}
|
||||
```
|
||||
|
||||
### Performance and Security Monitoring
|
||||
|
||||
1. **Logging**
|
||||
- Enable debug logging for security events
|
||||
- Monitor rate limit and validation logs
|
||||
|
||||
2. **Metrics**
|
||||
- Track rate limit hit rates
|
||||
- Monitor request validation success/failure ratios
|
||||
|
||||
3. **Continuous Improvement**
|
||||
- Regularly review and update security configurations
|
||||
- Conduct periodic security audits
|
||||
|
||||
### Environment-Specific Considerations
|
||||
|
||||
#### Development
|
||||
- More verbose error messages
|
||||
- Relaxed rate limiting
|
||||
- Detailed security logs
|
||||
|
||||
#### Production
|
||||
- Minimal error details
|
||||
- Strict rate limiting
|
||||
- Comprehensive security headers
|
||||
|
||||
### External Resources
|
||||
|
||||
- [OWASP Security Guidelines](https://owasp.org/www-project-top-ten/)
|
||||
- [Helmet.js Documentation](https://helmetjs.github.io/)
|
||||
- [JWT Security Best Practices](https://jwt.io/introduction)
|
||||
|
||||
### Getting Help
|
||||
|
||||
If you encounter persistent issues:
|
||||
1. Check application logs
|
||||
2. Verify environment configurations
|
||||
3. Consult the project's issue tracker
|
||||
4. Reach out to the development team with detailed error information
|
||||
34
docs/usage.md
Normal file
34
docs/usage.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Usage Guide
|
||||
|
||||
This guide explains how to use the Home Assistant MCP Server for smart home device management and integration with language learning systems.
|
||||
|
||||
## Basic Usage
|
||||
|
||||
1. **Starting the Server:**
|
||||
- For development: run `npm run dev`.
|
||||
- For production: run `npm run build` followed by `npm start`.
|
||||
|
||||
2. **Accessing the Web Interface:**
|
||||
- Open [http://localhost:3000](http://localhost:3000) in your browser.
|
||||
|
||||
3. **Real-Time Updates:**
|
||||
- Connect to the SSE endpoint at `/subscribe_events?token=YOUR_TOKEN&domain=light` to receive live updates.
|
||||
|
||||
## Advanced Features
|
||||
|
||||
1. **API Interactions:**
|
||||
- Use the REST API for operations such as device control, automation, and add-on management.
|
||||
- See [API Documentation](api.md) for details.
|
||||
|
||||
2. **Tool Integrations:**
|
||||
- Multiple tools are available (see [Tools Documentation](tools/tools.md)), for tasks like automation management and notifications.
|
||||
|
||||
3. **Security Settings:**
|
||||
- Configure token-based authentication and environment variables as per the [Configuration Guide](getting-started/configuration.md).
|
||||
|
||||
4. **Customization and Extensions:**
|
||||
- Extend server functionality by developing new tools as outlined in the [Development Guide](development/development.md).
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you experience issues, review the [Troubleshooting Guide](troubleshooting.md).
|
||||
91
examples/README.md
Normal file
91
examples/README.md
Normal file
@@ -0,0 +1,91 @@
|
||||
# Speech-to-Text Examples
|
||||
|
||||
This directory contains examples demonstrating how to use the speech-to-text integration with wake word detection.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
1. Make sure you have Docker installed and running
|
||||
2. Build and start the services:
|
||||
```bash
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
## Running the Example
|
||||
|
||||
1. Install dependencies:
|
||||
```bash
|
||||
npm install
|
||||
```
|
||||
|
||||
2. Run the example:
|
||||
```bash
|
||||
npm run example:speech
|
||||
```
|
||||
|
||||
Or using `ts-node` directly:
|
||||
```bash
|
||||
npx ts-node examples/speech-to-text-example.ts
|
||||
```
|
||||
|
||||
## Features Demonstrated
|
||||
|
||||
1. **Wake Word Detection**
|
||||
- Listens for wake words: "hey jarvis", "ok google", "alexa"
|
||||
- Automatically saves audio when wake word is detected
|
||||
- Transcribes the detected speech
|
||||
|
||||
2. **Manual Transcription**
|
||||
- Example of how to transcribe audio files manually
|
||||
- Supports different models and configurations
|
||||
|
||||
3. **Event Handling**
|
||||
- Wake word detection events
|
||||
- Transcription results
|
||||
- Progress updates
|
||||
- Error handling
|
||||
|
||||
## Example Output
|
||||
|
||||
When a wake word is detected, you'll see output like this:
|
||||
|
||||
```
|
||||
🎤 Wake word detected!
|
||||
Timestamp: 20240203_123456
|
||||
Audio file: /path/to/audio/wake_word_20240203_123456.wav
|
||||
Metadata file: /path/to/audio/wake_word_20240203_123456.wav.json
|
||||
|
||||
📝 Transcription result:
|
||||
Full text: This is what was said after the wake word.
|
||||
|
||||
Segments:
|
||||
1. [0.00s - 1.52s] (95.5% confidence)
|
||||
"This is what was said"
|
||||
2. [1.52s - 2.34s] (98.2% confidence)
|
||||
"after the wake word."
|
||||
```
|
||||
|
||||
## Customization
|
||||
|
||||
You can customize the behavior by:
|
||||
|
||||
1. Changing the wake word models in `docker/speech/Dockerfile`
|
||||
2. Modifying transcription options in the example file
|
||||
3. Adding your own event handlers
|
||||
4. Implementing different audio processing logic
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
1. **Docker Issues**
|
||||
- Make sure Docker is running
|
||||
- Check container logs: `docker-compose logs fast-whisper`
|
||||
- Verify container is up: `docker ps`
|
||||
|
||||
2. **Audio Issues**
|
||||
- Check audio device permissions
|
||||
- Verify audio file format (WAV files recommended)
|
||||
- Check audio file permissions
|
||||
|
||||
3. **Performance Issues**
|
||||
- Try using a smaller model (tiny.en or base.en)
|
||||
- Adjust beam size and patience parameters
|
||||
- Consider using GPU acceleration if available
|
||||
91
examples/speech-to-text-example.ts
Normal file
91
examples/speech-to-text-example.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { SpeechToText, TranscriptionResult, WakeWordEvent } from '../src/speech/speechToText';
|
||||
import path from 'path';
|
||||
|
||||
async function main() {
|
||||
// Initialize the speech-to-text service
|
||||
const speech = new SpeechToText('fast-whisper');
|
||||
|
||||
// Check if the service is available
|
||||
const isHealthy = await speech.checkHealth();
|
||||
if (!isHealthy) {
|
||||
console.error('Speech service is not available. Make sure Docker is running and the fast-whisper container is up.');
|
||||
console.error('Run: docker-compose up -d');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('Speech service is ready!');
|
||||
console.log('Listening for wake words: "hey jarvis", "ok google", "alexa"');
|
||||
console.log('Press Ctrl+C to exit');
|
||||
|
||||
// Set up event handlers
|
||||
speech.on('wake_word', (event: WakeWordEvent) => {
|
||||
console.log('\n🎤 Wake word detected!');
|
||||
console.log(' Timestamp:', event.timestamp);
|
||||
console.log(' Audio file:', event.audioFile);
|
||||
console.log(' Metadata file:', event.metadataFile);
|
||||
});
|
||||
|
||||
speech.on('transcription', (event: { audioFile: string; result: TranscriptionResult }) => {
|
||||
console.log('\n📝 Transcription result:');
|
||||
console.log(' Full text:', event.result.text);
|
||||
console.log('\n Segments:');
|
||||
event.result.segments.forEach((segment, index) => {
|
||||
console.log(` ${index + 1}. [${segment.start.toFixed(2)}s - ${segment.end.toFixed(2)}s] (${(segment.confidence * 100).toFixed(1)}% confidence)`);
|
||||
console.log(` "${segment.text}"`);
|
||||
});
|
||||
});
|
||||
|
||||
speech.on('progress', (event: { type: string; data: string }) => {
|
||||
if (event.type === 'stderr' && !event.data.includes('Loading model')) {
|
||||
console.error('❌ Error:', event.data);
|
||||
}
|
||||
});
|
||||
|
||||
speech.on('error', (error: Error) => {
|
||||
console.error('❌ Error:', error.message);
|
||||
});
|
||||
|
||||
// Example of manual transcription
|
||||
async function transcribeFile(filepath: string) {
|
||||
try {
|
||||
console.log(`\n🎯 Manually transcribing: ${filepath}`);
|
||||
const result = await speech.transcribeAudio(filepath, {
|
||||
model: 'base.en', // You can change this to tiny.en, small.en, medium.en, or large-v2
|
||||
language: 'en',
|
||||
temperature: 0,
|
||||
beamSize: 5
|
||||
});
|
||||
|
||||
console.log('\n📝 Transcription result:');
|
||||
console.log(' Text:', result.text);
|
||||
} catch (error) {
|
||||
console.error('❌ Transcription failed:', error instanceof Error ? error.message : error);
|
||||
}
|
||||
}
|
||||
|
||||
// Create audio directory if it doesn't exist
|
||||
const audioDir = path.join(__dirname, '..', 'audio');
|
||||
if (!require('fs').existsSync(audioDir)) {
|
||||
require('fs').mkdirSync(audioDir, { recursive: true });
|
||||
}
|
||||
|
||||
// Start wake word detection
|
||||
speech.startWakeWordDetection(audioDir);
|
||||
|
||||
// Example: You can also manually transcribe files
|
||||
// Uncomment the following line and replace with your audio file:
|
||||
// await transcribeFile('/path/to/your/audio.wav');
|
||||
|
||||
// Keep the process running
|
||||
process.on('SIGINT', () => {
|
||||
console.log('\nStopping speech service...');
|
||||
speech.stopWakeWordDetection();
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
// Run the example
|
||||
main().catch(error => {
|
||||
console.error('Fatal error:', error);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,85 +0,0 @@
|
||||
const path = require('path');
|
||||
|
||||
module.exports = (request, options) => {
|
||||
// Handle chalk and related packages
|
||||
if (request === 'chalk' || request === '#ansi-styles' || request === '#supports-color') {
|
||||
return path.resolve(__dirname, 'node_modules', request.replace('#', ''));
|
||||
}
|
||||
|
||||
// Handle source files with .js extension
|
||||
if (request.endsWith('.js')) {
|
||||
const tsRequest = request.replace(/\.js$/, '.ts');
|
||||
try {
|
||||
return options.defaultResolver(tsRequest, {
|
||||
...options,
|
||||
packageFilter: pkg => {
|
||||
if (pkg.type === 'module') {
|
||||
if (pkg.exports && pkg.exports.import) {
|
||||
pkg.main = pkg.exports.import;
|
||||
} else if (pkg.module) {
|
||||
pkg.main = pkg.module;
|
||||
}
|
||||
}
|
||||
return pkg;
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
// If the .ts file doesn't exist, try resolving without extension
|
||||
try {
|
||||
return options.defaultResolver(request.replace(/\.js$/, ''), options);
|
||||
} catch (e2) {
|
||||
// If that fails too, try resolving with .ts extension
|
||||
try {
|
||||
return options.defaultResolver(tsRequest, options);
|
||||
} catch (e3) {
|
||||
// If all attempts fail, try resolving the original request
|
||||
return options.defaultResolver(request, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle @digital-alchemy packages
|
||||
if (request.startsWith('@digital-alchemy/')) {
|
||||
try {
|
||||
const packagePath = path.resolve(__dirname, 'node_modules', request);
|
||||
return options.defaultResolver(packagePath, {
|
||||
...options,
|
||||
packageFilter: pkg => {
|
||||
if (pkg.type === 'module') {
|
||||
if (pkg.exports && pkg.exports.import) {
|
||||
pkg.main = pkg.exports.import;
|
||||
} else if (pkg.module) {
|
||||
pkg.main = pkg.module;
|
||||
}
|
||||
}
|
||||
return pkg;
|
||||
}
|
||||
});
|
||||
} catch (e) {
|
||||
// If resolution fails, continue with default resolver
|
||||
}
|
||||
}
|
||||
|
||||
// Call the default resolver with enhanced module resolution
|
||||
return options.defaultResolver(request, {
|
||||
...options,
|
||||
// Handle ESM modules
|
||||
packageFilter: pkg => {
|
||||
if (pkg.type === 'module') {
|
||||
if (pkg.exports) {
|
||||
if (pkg.exports.import) {
|
||||
pkg.main = pkg.exports.import;
|
||||
} else if (typeof pkg.exports === 'string') {
|
||||
pkg.main = pkg.exports;
|
||||
}
|
||||
} else if (pkg.module) {
|
||||
pkg.main = pkg.module;
|
||||
}
|
||||
}
|
||||
return pkg;
|
||||
},
|
||||
extensions: ['.ts', '.tsx', '.js', '.jsx', '.json'],
|
||||
paths: [...(options.paths || []), path.resolve(__dirname, 'src')]
|
||||
});
|
||||
};
|
||||
@@ -1,17 +0,0 @@
|
||||
/** @type {import('bun:test').BunTestConfig} */
|
||||
module.exports = {
|
||||
testEnvironment: 'node',
|
||||
moduleFileExtensions: ['ts', 'js', 'json', 'node'],
|
||||
testMatch: ['**/__tests__/**/*.test.ts'],
|
||||
collectCoverage: true,
|
||||
coverageDirectory: 'coverage',
|
||||
coverageThreshold: {
|
||||
global: {
|
||||
statements: 50,
|
||||
branches: 50,
|
||||
functions: 50,
|
||||
lines: 50
|
||||
}
|
||||
},
|
||||
setupFilesAfterEnv: ['./jest.setup.ts']
|
||||
};
|
||||
@@ -1,87 +0,0 @@
|
||||
import { jest } from '@jest/globals';
|
||||
import dotenv from 'dotenv';
|
||||
import { TextEncoder, TextDecoder } from 'util';
|
||||
|
||||
// Load test environment variables
|
||||
dotenv.config({ path: '.env.test' });
|
||||
|
||||
// Set test environment
|
||||
process.env.NODE_ENV = 'test';
|
||||
process.env.ENCRYPTION_KEY = 'test-encryption-key-32-bytes-long!!!';
|
||||
process.env.JWT_SECRET = 'test-jwt-secret';
|
||||
process.env.HASS_URL = 'http://localhost:8123';
|
||||
process.env.HASS_TOKEN = 'test-token';
|
||||
process.env.CLAUDE_API_KEY = 'test_api_key';
|
||||
process.env.CLAUDE_MODEL = 'test_model';
|
||||
|
||||
// Add TextEncoder and TextDecoder to global scope
|
||||
Object.defineProperty(global, 'TextEncoder', {
|
||||
value: TextEncoder,
|
||||
writable: true
|
||||
});
|
||||
|
||||
Object.defineProperty(global, 'TextDecoder', {
|
||||
value: TextDecoder,
|
||||
writable: true
|
||||
});
|
||||
|
||||
// Configure console for tests
|
||||
const originalConsole = { ...console };
|
||||
global.console = {
|
||||
...console,
|
||||
log: jest.fn(),
|
||||
error: jest.fn(),
|
||||
warn: jest.fn(),
|
||||
info: jest.fn(),
|
||||
debug: jest.fn(),
|
||||
};
|
||||
|
||||
// Increase test timeout
|
||||
jest.setTimeout(30000);
|
||||
|
||||
// Mock WebSocket
|
||||
jest.mock('ws', () => {
|
||||
return {
|
||||
WebSocket: jest.fn().mockImplementation(() => ({
|
||||
on: jest.fn(),
|
||||
send: jest.fn(),
|
||||
close: jest.fn(),
|
||||
removeAllListeners: jest.fn()
|
||||
}))
|
||||
};
|
||||
});
|
||||
|
||||
// Mock chalk
|
||||
const createChalkMock = () => {
|
||||
const handler = {
|
||||
get(target: any, prop: string) {
|
||||
if (prop === 'default') {
|
||||
return createChalkMock();
|
||||
}
|
||||
return typeof prop === 'string' ? createChalkMock() : target[prop];
|
||||
},
|
||||
apply(target: any, thisArg: any, args: any[]) {
|
||||
return args[0];
|
||||
}
|
||||
};
|
||||
return new Proxy(() => { }, handler);
|
||||
};
|
||||
|
||||
jest.mock('chalk', () => createChalkMock());
|
||||
|
||||
// Mock ansi-styles
|
||||
jest.mock('ansi-styles', () => ({}), { virtual: true });
|
||||
|
||||
// Mock supports-color
|
||||
jest.mock('supports-color', () => ({}), { virtual: true });
|
||||
|
||||
// Reset mocks between tests
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
// Cleanup after tests
|
||||
afterEach(() => {
|
||||
jest.clearAllTimers();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
26
mkdocs.yml
Normal file
26
mkdocs.yml
Normal file
@@ -0,0 +1,26 @@
|
||||
site_name: Home Assistant Model Context Protocol (MCP)
|
||||
site_url: https://yourusername.github.io/your-repo-name/
|
||||
repo_url: https://github.com/yourusername/your-repo-name
|
||||
|
||||
theme:
|
||||
name: material
|
||||
features:
|
||||
- navigation.tabs
|
||||
- navigation.sections
|
||||
- toc.integrate
|
||||
- search.suggest
|
||||
- search.highlight
|
||||
|
||||
markdown_extensions:
|
||||
- pymdownx.highlight
|
||||
- pymdownx.superfences
|
||||
- admonition
|
||||
- pymdownx.details
|
||||
|
||||
nav:
|
||||
- Home: index.md
|
||||
- Getting Started:
|
||||
- Installation: getting-started/installation.md
|
||||
- Configuration: getting-started/configuration.md
|
||||
- Usage: usage.md
|
||||
- Contributing: contributing.md
|
||||
93
package.json
93
package.json
@@ -1,66 +1,61 @@
|
||||
{
|
||||
"name": "homeassistant-mcp",
|
||||
"version": "0.1.0",
|
||||
"description": "Model Context Protocol Server for Home Assistant",
|
||||
"type": "module",
|
||||
"version": "1.0.0",
|
||||
"description": "Home Assistant Model Context Protocol",
|
||||
"main": "dist/index.js",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "bun run tsc",
|
||||
"start": "bun run dist/src/index.js",
|
||||
"dev": "bun --watch src/index.ts",
|
||||
"start": "bun run dist/index.js",
|
||||
"dev": "bun --hot --watch src/index.ts",
|
||||
"build": "bun build ./src/index.ts --outdir ./dist --target node --minify",
|
||||
"test": "bun test",
|
||||
"test:coverage": "bun test --coverage",
|
||||
"test:watch": "bun test --watch",
|
||||
"test:openai": "bun run openai_test.ts",
|
||||
"lint": "eslint src --ext .ts",
|
||||
"lint:fix": "eslint src --ext .ts --fix",
|
||||
"prepare": "bun run build",
|
||||
"clean": "rimraf dist",
|
||||
"types:check": "tsc --noEmit",
|
||||
"types:install": "bun add -d @types/node @types/jest"
|
||||
"test:coverage": "bun test --coverage",
|
||||
"test:ci": "bun test --coverage --bail",
|
||||
"test:update": "bun test --update-snapshots",
|
||||
"test:clear": "bun test --clear-cache",
|
||||
"test:staged": "bun test --findRelatedTests",
|
||||
"lint": "eslint . --ext .ts",
|
||||
"format": "prettier --write \"src/**/*.ts\"",
|
||||
"prepare": "husky install",
|
||||
"profile": "bun --inspect src/index.ts",
|
||||
"clean": "rm -rf dist .bun coverage",
|
||||
"typecheck": "bun x tsc --noEmit",
|
||||
"example:speech": "bun run examples/speech-to-text-example.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"@digital-alchemy/core": "^24.11.4",
|
||||
"@digital-alchemy/hass": "^24.11.4",
|
||||
"@types/chalk": "^0.4.31",
|
||||
"@types/jsonwebtoken": "^9.0.8",
|
||||
"@types/xmldom": "^0.1.34",
|
||||
"@xmldom/xmldom": "^0.9.7",
|
||||
"ajv": "^8.12.0",
|
||||
"chalk": "^5.4.1",
|
||||
"dotenv": "^16.3.1",
|
||||
"express": "^4.18.2",
|
||||
"express-rate-limit": "^7.1.5",
|
||||
"@elysiajs/cors": "^1.2.0",
|
||||
"@elysiajs/swagger": "^1.2.0",
|
||||
"@types/jsonwebtoken": "^9.0.5",
|
||||
"@types/node": "^20.11.24",
|
||||
"@types/sanitize-html": "^2.9.5",
|
||||
"@types/ws": "^8.5.10",
|
||||
"dotenv": "^16.4.5",
|
||||
"elysia": "^1.2.11",
|
||||
"helmet": "^7.1.0",
|
||||
"jsonwebtoken": "^9.0.2",
|
||||
"litemcp": "^0.7.0",
|
||||
"uuid": "^9.0.1",
|
||||
"node-fetch": "^3.3.2",
|
||||
"sanitize-html": "^2.11.0",
|
||||
"typescript": "^5.3.3",
|
||||
"winston": "^3.11.0",
|
||||
"winston-daily-rotate-file": "^5.0.0",
|
||||
"ws": "^8.16.0",
|
||||
"zod": "^3.22.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/ajv": "^1.0.0",
|
||||
"@types/express": "^4.17.21",
|
||||
"@types/express-rate-limit": "^6.0.0",
|
||||
"@types/glob": "^8.1.0",
|
||||
"@types/helmet": "^4.0.0",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/node": "^20.17.16",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/uuid": "^9.0.8",
|
||||
"@types/winston": "^2.4.4",
|
||||
"@types/ws": "^8.5.10",
|
||||
"jest": "^29.7.0",
|
||||
"node-fetch": "^3.3.2",
|
||||
"openai": "^4.82.0",
|
||||
"rimraf": "^5.0.10",
|
||||
"supertest": "^6.3.4",
|
||||
"ts-jest": "^29.1.2",
|
||||
"tsx": "^4.7.0",
|
||||
"typescript": "^5.3.3"
|
||||
"@types/uuid": "^10.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^7.1.0",
|
||||
"@typescript-eslint/parser": "^7.1.0",
|
||||
"bun-types": "^1.2.2",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
"eslint-plugin-prettier": "^5.1.3",
|
||||
"husky": "^9.0.11",
|
||||
"prettier": "^3.2.5",
|
||||
"supertest": "^6.3.3",
|
||||
"uuid": "^11.0.5"
|
||||
},
|
||||
"author": "Jango Blockchained",
|
||||
"license": "MIT",
|
||||
"packageManager": "bun@1.0.26"
|
||||
"engines": {
|
||||
"bun": ">=1.0.0"
|
||||
}
|
||||
}
|
||||
77
src/__mocks__/@digital-alchemy/hass.ts
Normal file
77
src/__mocks__/@digital-alchemy/hass.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { mock } from "bun:test";
|
||||
|
||||
export const LIB_HASS = {
|
||||
configuration: {
|
||||
name: "Home Assistant",
|
||||
version: "2024.2.0",
|
||||
location_name: "Home",
|
||||
time_zone: "UTC",
|
||||
components: ["automation", "script", "light", "switch"],
|
||||
unit_system: {
|
||||
temperature: "°C",
|
||||
length: "m",
|
||||
mass: "kg",
|
||||
pressure: "hPa",
|
||||
volume: "L",
|
||||
},
|
||||
},
|
||||
services: {
|
||||
light: {
|
||||
turn_on: mock(() => Promise.resolve()),
|
||||
turn_off: mock(() => Promise.resolve()),
|
||||
toggle: mock(() => Promise.resolve()),
|
||||
},
|
||||
switch: {
|
||||
turn_on: mock(() => Promise.resolve()),
|
||||
turn_off: mock(() => Promise.resolve()),
|
||||
toggle: mock(() => Promise.resolve()),
|
||||
},
|
||||
automation: {
|
||||
trigger: mock(() => Promise.resolve()),
|
||||
turn_on: mock(() => Promise.resolve()),
|
||||
turn_off: mock(() => Promise.resolve()),
|
||||
},
|
||||
script: {
|
||||
turn_on: mock(() => Promise.resolve()),
|
||||
turn_off: mock(() => Promise.resolve()),
|
||||
toggle: mock(() => Promise.resolve()),
|
||||
},
|
||||
},
|
||||
states: {
|
||||
light: {
|
||||
"light.living_room": {
|
||||
state: "on",
|
||||
attributes: {
|
||||
brightness: 255,
|
||||
color_temp: 300,
|
||||
friendly_name: "Living Room Light",
|
||||
},
|
||||
},
|
||||
"light.bedroom": {
|
||||
state: "off",
|
||||
attributes: {
|
||||
friendly_name: "Bedroom Light",
|
||||
},
|
||||
},
|
||||
},
|
||||
switch: {
|
||||
"switch.tv": {
|
||||
state: "off",
|
||||
attributes: {
|
||||
friendly_name: "TV",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
events: {
|
||||
subscribe: mock(() => Promise.resolve()),
|
||||
unsubscribe: mock(() => Promise.resolve()),
|
||||
fire: mock(() => Promise.resolve()),
|
||||
},
|
||||
connection: {
|
||||
subscribeEvents: mock(() => Promise.resolve()),
|
||||
subscribeMessage: mock(() => Promise.resolve()),
|
||||
sendMessage: mock(() => Promise.resolve()),
|
||||
close: mock(() => Promise.resolve()),
|
||||
},
|
||||
};
|
||||
61
src/__mocks__/litemcp.ts
Normal file
61
src/__mocks__/litemcp.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
export class LiteMCP {
|
||||
name: string;
|
||||
version: string;
|
||||
config: any;
|
||||
|
||||
constructor(config: any = {}) {
|
||||
this.name = "home-assistant";
|
||||
this.version = "1.0.0";
|
||||
this.config = config;
|
||||
}
|
||||
|
||||
async start() {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async stop() {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async connect() {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async disconnect() {
|
||||
return Promise.resolve();
|
||||
}
|
||||
|
||||
async callService(domain: string, service: string, data: any = {}) {
|
||||
return Promise.resolve({ success: true });
|
||||
}
|
||||
|
||||
async getStates() {
|
||||
return Promise.resolve([]);
|
||||
}
|
||||
|
||||
async getState(entityId: string) {
|
||||
return Promise.resolve({
|
||||
entity_id: entityId,
|
||||
state: "unknown",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
async setState(entityId: string, state: string, attributes: any = {}) {
|
||||
return Promise.resolve({ success: true });
|
||||
}
|
||||
|
||||
onStateChanged(callback: (event: any) => void) {
|
||||
// Mock implementation
|
||||
}
|
||||
|
||||
onEvent(eventType: string, callback: (event: any) => void) {
|
||||
// Mock implementation
|
||||
}
|
||||
}
|
||||
|
||||
export const createMCP = (config: any = {}) => {
|
||||
return new LiteMCP(config);
|
||||
};
|
||||
143
src/__tests__/setup.ts
Normal file
143
src/__tests__/setup.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import { config } from "dotenv";
|
||||
import path from "path";
|
||||
import {
|
||||
beforeAll,
|
||||
afterAll,
|
||||
beforeEach,
|
||||
describe,
|
||||
expect,
|
||||
it,
|
||||
mock,
|
||||
test,
|
||||
} from "bun:test";
|
||||
|
||||
// Type definitions for mocks
|
||||
type MockFn = ReturnType<typeof mock>;
|
||||
|
||||
interface MockInstance {
|
||||
mock: {
|
||||
calls: unknown[][];
|
||||
results: unknown[];
|
||||
instances: unknown[];
|
||||
lastCall?: unknown[];
|
||||
};
|
||||
}
|
||||
|
||||
// Test configuration
|
||||
const TEST_CONFIG = {
|
||||
TEST_JWT_SECRET: "test_jwt_secret_key_that_is_at_least_32_chars",
|
||||
TEST_TOKEN: "test_token_that_is_at_least_32_chars_long",
|
||||
TEST_CLIENT_IP: "127.0.0.1",
|
||||
};
|
||||
|
||||
// Load test environment variables
|
||||
config({ path: path.resolve(process.cwd(), ".env.test") });
|
||||
|
||||
// Global test setup
|
||||
beforeAll(() => {
|
||||
// Set required environment variables
|
||||
process.env.NODE_ENV = "test";
|
||||
process.env.JWT_SECRET = TEST_CONFIG.TEST_JWT_SECRET;
|
||||
process.env.TEST_TOKEN = TEST_CONFIG.TEST_TOKEN;
|
||||
|
||||
// Configure console output for tests
|
||||
if (!process.env.DEBUG) {
|
||||
console.error = mock(() => { });
|
||||
console.warn = mock(() => { });
|
||||
console.log = mock(() => { });
|
||||
}
|
||||
});
|
||||
|
||||
// Reset mocks between tests
|
||||
beforeEach(() => {
|
||||
// Clear all mock function calls
|
||||
const mockFns = Object.values(mock).filter(
|
||||
(value): value is MockFn => typeof value === "function" && "mock" in value,
|
||||
);
|
||||
mockFns.forEach((mockFn) => {
|
||||
if (mockFn.mock) {
|
||||
mockFn.mock.calls = [];
|
||||
mockFn.mock.results = [];
|
||||
mockFn.mock.instances = [];
|
||||
mockFn.mock.lastCall = undefined;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// Custom test utilities
|
||||
const testUtils = {
|
||||
// Mock WebSocket for SSE tests
|
||||
mockWebSocket: () => ({
|
||||
on: mock(() => { }),
|
||||
send: mock(() => { }),
|
||||
close: mock(() => { }),
|
||||
readyState: 1,
|
||||
OPEN: 1,
|
||||
removeAllListeners: mock(() => { }),
|
||||
}),
|
||||
|
||||
// Mock HTTP response for API tests
|
||||
mockResponse: () => {
|
||||
const res = {
|
||||
status: mock(() => res),
|
||||
json: mock(() => res),
|
||||
send: mock(() => res),
|
||||
end: mock(() => res),
|
||||
setHeader: mock(() => res),
|
||||
writeHead: mock(() => res),
|
||||
write: mock(() => true),
|
||||
removeHeader: mock(() => res),
|
||||
};
|
||||
return res;
|
||||
},
|
||||
|
||||
// Mock HTTP request for API tests
|
||||
mockRequest: (overrides: Record<string, unknown> = {}) => ({
|
||||
headers: { "content-type": "application/json" },
|
||||
body: {},
|
||||
query: {},
|
||||
params: {},
|
||||
ip: TEST_CONFIG.TEST_CLIENT_IP,
|
||||
method: "GET",
|
||||
path: "/api/test",
|
||||
is: mock((type: string) => type === "application/json"),
|
||||
...overrides,
|
||||
}),
|
||||
|
||||
// Create test client for SSE tests
|
||||
createTestClient: (id = "test-client") => ({
|
||||
id,
|
||||
ip: TEST_CONFIG.TEST_CLIENT_IP,
|
||||
connectedAt: new Date(),
|
||||
send: mock(() => { }),
|
||||
rateLimit: {
|
||||
count: 0,
|
||||
lastReset: Date.now(),
|
||||
},
|
||||
connectionTime: Date.now(),
|
||||
}),
|
||||
|
||||
// Create test event for SSE tests
|
||||
createTestEvent: (type = "test_event", data: unknown = {}) => ({
|
||||
event_type: type,
|
||||
data,
|
||||
origin: "test",
|
||||
time_fired: new Date().toISOString(),
|
||||
context: { id: "test" },
|
||||
}),
|
||||
|
||||
// Create test entity for Home Assistant tests
|
||||
createTestEntity: (entityId = "test.entity", state = "on") => ({
|
||||
entity_id: entityId,
|
||||
state,
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
}),
|
||||
|
||||
// Helper to wait for async operations
|
||||
wait: (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)),
|
||||
};
|
||||
|
||||
// Export test utilities and Bun test functions
|
||||
export { beforeAll, afterAll, beforeEach, describe, expect, it, mock, test, testUtils };
|
||||
@@ -1,8 +1,14 @@
|
||||
import express from 'express';
|
||||
import { z } from 'zod';
|
||||
import { NLPProcessor } from '../nlp/processor.js';
|
||||
import { AIRateLimit, AIContext, AIResponse, AIError, AIModel } from '../types/index.js';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import express from "express";
|
||||
import { z } from "zod";
|
||||
import { NLPProcessor } from "../nlp/processor.js";
|
||||
import {
|
||||
AIRateLimit,
|
||||
AIContext,
|
||||
AIResponse,
|
||||
AIError,
|
||||
AIModel,
|
||||
} from "../types/index.js";
|
||||
import rateLimit from "express-rate-limit";
|
||||
|
||||
const router = express.Router();
|
||||
const nlpProcessor = new NLPProcessor();
|
||||
@@ -15,17 +21,17 @@ const rateLimitConfig: AIRateLimit = {
|
||||
model_specific_limits: {
|
||||
claude: {
|
||||
requests_per_minute: 100,
|
||||
requests_per_hour: 1000
|
||||
requests_per_hour: 1000,
|
||||
},
|
||||
gpt4: {
|
||||
requests_per_minute: 50,
|
||||
requests_per_hour: 500
|
||||
requests_per_hour: 500,
|
||||
},
|
||||
custom: {
|
||||
requests_per_minute: 200,
|
||||
requests_per_hour: 2000
|
||||
}
|
||||
}
|
||||
requests_per_hour: 2000,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
// Request validation schemas
|
||||
@@ -37,21 +43,23 @@ const interpretRequestSchema = z.object({
|
||||
timestamp: z.string(),
|
||||
location: z.string(),
|
||||
previous_actions: z.array(z.any()),
|
||||
environment_state: z.record(z.any())
|
||||
environment_state: z.record(z.any()),
|
||||
}),
|
||||
model: z.enum(['claude', 'gpt4', 'custom']).optional()
|
||||
model: z.enum(["claude", "gpt4", "custom"]).optional(),
|
||||
});
|
||||
|
||||
// Rate limiters
|
||||
const globalLimiter = rateLimit({
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
max: rateLimitConfig.requests_per_minute
|
||||
max: rateLimitConfig.requests_per_minute,
|
||||
});
|
||||
|
||||
const modelSpecificLimiter = (model: string) => rateLimit({
|
||||
const modelSpecificLimiter = (model: string) =>
|
||||
rateLimit({
|
||||
windowMs: 60 * 1000,
|
||||
max: rateLimitConfig.model_specific_limits[model as AIModel]?.requests_per_minute ||
|
||||
rateLimitConfig.requests_per_minute
|
||||
max:
|
||||
rateLimitConfig.model_specific_limits[model as AIModel]
|
||||
?.requests_per_minute || rateLimitConfig.requests_per_minute,
|
||||
});
|
||||
|
||||
// Error handler middleware
|
||||
@@ -59,18 +67,18 @@ const errorHandler = (
|
||||
error: Error,
|
||||
req: express.Request,
|
||||
res: express.Response,
|
||||
next: express.NextFunction
|
||||
next: express.NextFunction,
|
||||
) => {
|
||||
const aiError: AIError = {
|
||||
code: 'PROCESSING_ERROR',
|
||||
code: "PROCESSING_ERROR",
|
||||
message: error.message,
|
||||
suggestion: 'Please try again with a different command format',
|
||||
suggestion: "Please try again with a different command format",
|
||||
recovery_options: [
|
||||
'Simplify your command',
|
||||
'Use standard command patterns',
|
||||
'Check device names and parameters'
|
||||
"Simplify your command",
|
||||
"Use standard command patterns",
|
||||
"Check device names and parameters",
|
||||
],
|
||||
context: req.body.context
|
||||
context: req.body.context,
|
||||
};
|
||||
|
||||
res.status(500).json({ error: aiError });
|
||||
@@ -78,15 +86,26 @@ const errorHandler = (
|
||||
|
||||
// Endpoints
|
||||
router.post(
|
||||
'/interpret',
|
||||
"/interpret",
|
||||
globalLimiter,
|
||||
async (req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
async (
|
||||
req: express.Request,
|
||||
res: express.Response,
|
||||
next: express.NextFunction,
|
||||
) => {
|
||||
try {
|
||||
const { input, context, model = 'claude' } = interpretRequestSchema.parse(req.body);
|
||||
const {
|
||||
input,
|
||||
context,
|
||||
model = "claude",
|
||||
} = interpretRequestSchema.parse(req.body);
|
||||
|
||||
// Apply model-specific rate limiting
|
||||
modelSpecificLimiter(model)(req, res, async () => {
|
||||
const { intent, confidence, error } = await nlpProcessor.processCommand(input, context);
|
||||
const { intent, confidence, error } = await nlpProcessor.processCommand(
|
||||
input,
|
||||
context,
|
||||
);
|
||||
|
||||
if (error) {
|
||||
return res.status(400).json({ error });
|
||||
@@ -96,39 +115,39 @@ router.post(
|
||||
|
||||
if (!isValid) {
|
||||
const suggestions = await nlpProcessor.suggestCorrections(input, {
|
||||
code: 'INVALID_INTENT',
|
||||
message: 'Could not understand the command with high confidence',
|
||||
suggestion: 'Please try rephrasing your command',
|
||||
code: "INVALID_INTENT",
|
||||
message: "Could not understand the command with high confidence",
|
||||
suggestion: "Please try rephrasing your command",
|
||||
recovery_options: [],
|
||||
context
|
||||
context,
|
||||
});
|
||||
|
||||
return res.status(400).json({
|
||||
error: {
|
||||
code: 'INVALID_INTENT',
|
||||
message: 'Could not understand the command with high confidence',
|
||||
suggestion: 'Please try rephrasing your command',
|
||||
code: "INVALID_INTENT",
|
||||
message: "Could not understand the command with high confidence",
|
||||
suggestion: "Please try rephrasing your command",
|
||||
recovery_options: suggestions,
|
||||
context
|
||||
}
|
||||
context,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
const response: AIResponse = {
|
||||
natural_language: `I'll ${intent.action} the ${intent.target.split('.').pop()}`,
|
||||
natural_language: `I'll ${intent.action} the ${intent.target.split(".").pop()}`,
|
||||
structured_data: {
|
||||
success: true,
|
||||
action_taken: intent.action,
|
||||
entities_affected: [intent.target],
|
||||
state_changes: intent.parameters
|
||||
state_changes: intent.parameters,
|
||||
},
|
||||
next_suggestions: [
|
||||
'Would you like to adjust any settings?',
|
||||
'Should I perform this action in other rooms?',
|
||||
'Would you like to schedule this action?'
|
||||
"Would you like to adjust any settings?",
|
||||
"Should I perform this action in other rooms?",
|
||||
"Would you like to schedule this action?",
|
||||
],
|
||||
confidence,
|
||||
context
|
||||
context,
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
@@ -136,15 +155,19 @@ router.post(
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
router.post(
|
||||
'/execute',
|
||||
"/execute",
|
||||
globalLimiter,
|
||||
async (req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
async (
|
||||
req: express.Request,
|
||||
res: express.Response,
|
||||
next: express.NextFunction,
|
||||
) => {
|
||||
try {
|
||||
const { intent, context, model = 'claude' } = req.body;
|
||||
const { intent, context, model = "claude" } = req.body;
|
||||
|
||||
// Apply model-specific rate limiting
|
||||
modelSpecificLimiter(model)(req, res, async () => {
|
||||
@@ -157,15 +180,15 @@ router.post(
|
||||
success: true,
|
||||
action_taken: intent.action,
|
||||
entities_affected: [intent.target],
|
||||
state_changes: intent.parameters
|
||||
state_changes: intent.parameters,
|
||||
},
|
||||
next_suggestions: [
|
||||
'Would you like to verify the state?',
|
||||
'Should I perform any related actions?',
|
||||
'Would you like to undo this action?'
|
||||
"Would you like to verify the state?",
|
||||
"Should I perform any related actions?",
|
||||
"Would you like to undo this action?",
|
||||
],
|
||||
confidence: { overall: 1, intent: 1, entities: 1, context: 1 },
|
||||
context
|
||||
context,
|
||||
};
|
||||
|
||||
res.json(response);
|
||||
@@ -173,24 +196,28 @@ router.post(
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/suggestions',
|
||||
"/suggestions",
|
||||
globalLimiter,
|
||||
async (req: express.Request, res: express.Response, next: express.NextFunction) => {
|
||||
async (
|
||||
req: express.Request,
|
||||
res: express.Response,
|
||||
next: express.NextFunction,
|
||||
) => {
|
||||
try {
|
||||
const { context, model = 'claude' } = req.body;
|
||||
const { context, model = "claude" } = req.body;
|
||||
|
||||
// Apply model-specific rate limiting
|
||||
modelSpecificLimiter(model)(req, res, async () => {
|
||||
// Generate context-aware suggestions
|
||||
const suggestions = [
|
||||
'Turn on the lights in the living room',
|
||||
'Set the temperature to 72 degrees',
|
||||
'Show me the current state of all devices',
|
||||
'Start the evening routine'
|
||||
"Turn on the lights in the living room",
|
||||
"Set the temperature to 72 degrees",
|
||||
"Show me the current state of all devices",
|
||||
"Start the evening routine",
|
||||
];
|
||||
|
||||
res.json({ suggestions });
|
||||
@@ -198,7 +225,7 @@ router.get(
|
||||
} catch (error) {
|
||||
next(error);
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
// Apply error handler
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AIContext, AIIntent } from '../types/index.js';
|
||||
import { AIContext, AIIntent } from "../types/index.js";
|
||||
|
||||
interface ContextAnalysis {
|
||||
confidence: number;
|
||||
@@ -19,9 +19,12 @@ export class ContextAnalyzer {
|
||||
// Location-based context
|
||||
{
|
||||
condition: (context, intent) =>
|
||||
Boolean(context.location && intent.target.includes(context.location.toLowerCase())),
|
||||
Boolean(
|
||||
context.location &&
|
||||
intent.target.includes(context.location.toLowerCase()),
|
||||
),
|
||||
relevance: 0.8,
|
||||
params: (context) => ({ location: context.location })
|
||||
params: (context) => ({ location: context.location }),
|
||||
},
|
||||
|
||||
// Time-based context
|
||||
@@ -32,40 +35,46 @@ export class ContextAnalyzer {
|
||||
},
|
||||
relevance: 0.6,
|
||||
params: (context) => ({
|
||||
time_of_day: this.getTimeOfDay(new Date(context.timestamp))
|
||||
})
|
||||
time_of_day: this.getTimeOfDay(new Date(context.timestamp)),
|
||||
}),
|
||||
},
|
||||
|
||||
// Previous action context
|
||||
{
|
||||
condition: (context, intent) => {
|
||||
const recentActions = context.previous_actions.slice(-3);
|
||||
return recentActions.some(action =>
|
||||
return recentActions.some(
|
||||
(action) =>
|
||||
action.target === intent.target ||
|
||||
action.action === intent.action
|
||||
action.action === intent.action,
|
||||
);
|
||||
},
|
||||
relevance: 0.7,
|
||||
params: (context) => ({
|
||||
recent_action: context.previous_actions[context.previous_actions.length - 1]
|
||||
})
|
||||
recent_action:
|
||||
context.previous_actions[context.previous_actions.length - 1],
|
||||
}),
|
||||
},
|
||||
|
||||
// Environment state context
|
||||
{
|
||||
condition: (context, intent) => {
|
||||
return Object.keys(context.environment_state).some(key =>
|
||||
return Object.keys(context.environment_state).some(
|
||||
(key) =>
|
||||
intent.target.includes(key) ||
|
||||
intent.parameters[key] !== undefined
|
||||
intent.parameters[key] !== undefined,
|
||||
);
|
||||
},
|
||||
relevance: 0.9,
|
||||
params: (context) => ({ environment: context.environment_state })
|
||||
}
|
||||
params: (context) => ({ environment: context.environment_state }),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async analyze(intent: AIIntent, context: AIContext): Promise<ContextAnalysis> {
|
||||
async analyze(
|
||||
intent: AIIntent,
|
||||
context: AIContext,
|
||||
): Promise<ContextAnalysis> {
|
||||
let totalConfidence = 0;
|
||||
let relevantParams: Record<string, any> = {};
|
||||
let applicableRules = 0;
|
||||
@@ -78,30 +87,29 @@ export class ContextAnalyzer {
|
||||
if (rule.params) {
|
||||
relevantParams = {
|
||||
...relevantParams,
|
||||
...rule.params(context)
|
||||
...rule.params(context),
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate normalized confidence
|
||||
const confidence = applicableRules > 0
|
||||
? totalConfidence / applicableRules
|
||||
: 0.5; // Default confidence if no rules apply
|
||||
const confidence =
|
||||
applicableRules > 0 ? totalConfidence / applicableRules : 0.5; // Default confidence if no rules apply
|
||||
|
||||
return {
|
||||
confidence,
|
||||
relevant_params: relevantParams
|
||||
relevant_params: relevantParams,
|
||||
};
|
||||
}
|
||||
|
||||
private getTimeOfDay(date: Date): string {
|
||||
const hour = date.getHours();
|
||||
|
||||
if (hour >= 5 && hour < 12) return 'morning';
|
||||
if (hour >= 12 && hour < 17) return 'afternoon';
|
||||
if (hour >= 17 && hour < 22) return 'evening';
|
||||
return 'night';
|
||||
if (hour >= 5 && hour < 12) return "morning";
|
||||
if (hour >= 12 && hour < 17) return "afternoon";
|
||||
if (hour >= 17 && hour < 22) return "evening";
|
||||
return "night";
|
||||
}
|
||||
|
||||
async updateContextRules(newRules: ContextRule[]): Promise<void> {
|
||||
@@ -126,7 +134,10 @@ export class ContextAnalyzer {
|
||||
}
|
||||
|
||||
// Validate environment state
|
||||
if (typeof context.environment_state !== 'object' || context.environment_state === null) {
|
||||
if (
|
||||
typeof context.environment_state !== "object" ||
|
||||
context.environment_state === null
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AIContext } from '../types/index.js';
|
||||
import { AIContext } from "../types/index.js";
|
||||
|
||||
interface ExtractedEntities {
|
||||
primary_target: string;
|
||||
@@ -18,21 +18,24 @@ export class EntityExtractor {
|
||||
|
||||
private initializePatterns(): void {
|
||||
// Device name variations
|
||||
this.deviceNameMap.set('living room light', 'light.living_room');
|
||||
this.deviceNameMap.set('kitchen light', 'light.kitchen');
|
||||
this.deviceNameMap.set('bedroom light', 'light.bedroom');
|
||||
this.deviceNameMap.set("living room light", "light.living_room");
|
||||
this.deviceNameMap.set("kitchen light", "light.kitchen");
|
||||
this.deviceNameMap.set("bedroom light", "light.bedroom");
|
||||
|
||||
// Parameter patterns
|
||||
this.parameterPatterns.set('brightness', /(\d+)\s*(%|percent)|bright(ness)?\s+(\d+)/i);
|
||||
this.parameterPatterns.set('temperature', /(\d+)\s*(degrees?|°)[CF]?/i);
|
||||
this.parameterPatterns.set('color', /(red|green|blue|white|warm|cool)/i);
|
||||
this.parameterPatterns.set(
|
||||
"brightness",
|
||||
/(\d+)\s*(%|percent)|bright(ness)?\s+(\d+)/i,
|
||||
);
|
||||
this.parameterPatterns.set("temperature", /(\d+)\s*(degrees?|°)[CF]?/i);
|
||||
this.parameterPatterns.set("color", /(red|green|blue|white|warm|cool)/i);
|
||||
}
|
||||
|
||||
async extract(input: string): Promise<ExtractedEntities> {
|
||||
const entities: ExtractedEntities = {
|
||||
primary_target: '',
|
||||
primary_target: "",
|
||||
parameters: {},
|
||||
confidence: 0
|
||||
confidence: 0,
|
||||
};
|
||||
|
||||
try {
|
||||
@@ -48,7 +51,10 @@ export class EntityExtractor {
|
||||
for (const [param, pattern] of this.parameterPatterns) {
|
||||
const match = input.match(pattern);
|
||||
if (match) {
|
||||
entities.parameters[param] = this.normalizeParameterValue(param, match[1]);
|
||||
entities.parameters[param] = this.normalizeParameterValue(
|
||||
param,
|
||||
match[1],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -57,29 +63,35 @@ export class EntityExtractor {
|
||||
|
||||
return entities;
|
||||
} catch (error) {
|
||||
console.error('Entity extraction error:', error);
|
||||
console.error("Entity extraction error:", error);
|
||||
return {
|
||||
primary_target: '',
|
||||
primary_target: "",
|
||||
parameters: {},
|
||||
confidence: 0
|
||||
confidence: 0,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private normalizeParameterValue(parameter: string, value: string): number | string {
|
||||
private normalizeParameterValue(
|
||||
parameter: string,
|
||||
value: string,
|
||||
): number | string {
|
||||
switch (parameter) {
|
||||
case 'brightness':
|
||||
case "brightness":
|
||||
return Math.min(100, Math.max(0, parseInt(value)));
|
||||
case 'temperature':
|
||||
case "temperature":
|
||||
return parseInt(value);
|
||||
case 'color':
|
||||
case "color":
|
||||
return value.toLowerCase();
|
||||
default:
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
private calculateConfidence(entities: ExtractedEntities, input: string): number {
|
||||
private calculateConfidence(
|
||||
entities: ExtractedEntities,
|
||||
input: string,
|
||||
): number {
|
||||
let confidence = 0;
|
||||
|
||||
// Device confidence
|
||||
|
||||
@@ -18,54 +18,47 @@ export class IntentClassifier {
|
||||
constructor() {
|
||||
this.actionPatterns = [
|
||||
{
|
||||
action: 'turn_on',
|
||||
patterns: [
|
||||
/turn\s+on/i,
|
||||
/switch\s+on/i,
|
||||
/enable/i,
|
||||
/activate/i
|
||||
]
|
||||
action: "turn_on",
|
||||
patterns: [/turn\s+on/i, /switch\s+on/i, /enable/i, /activate/i],
|
||||
},
|
||||
{
|
||||
action: 'turn_off',
|
||||
patterns: [
|
||||
/turn\s+off/i,
|
||||
/switch\s+off/i,
|
||||
/disable/i,
|
||||
/deactivate/i
|
||||
]
|
||||
action: "turn_off",
|
||||
patterns: [/turn\s+off/i, /switch\s+off/i, /disable/i, /deactivate/i],
|
||||
},
|
||||
{
|
||||
action: 'set',
|
||||
action: "set",
|
||||
patterns: [
|
||||
/set\s+(?:the\s+)?(.+)\s+to/i,
|
||||
/change\s+(?:the\s+)?(.+)\s+to/i,
|
||||
/adjust\s+(?:the\s+)?(.+)\s+to/i
|
||||
/adjust\s+(?:the\s+)?(.+)\s+to/i,
|
||||
],
|
||||
parameters: ['brightness', 'temperature', 'color']
|
||||
parameters: ["brightness", "temperature", "color"],
|
||||
},
|
||||
{
|
||||
action: 'query',
|
||||
action: "query",
|
||||
patterns: [
|
||||
/what\s+is/i,
|
||||
/get\s+(?:the\s+)?(.+)/i,
|
||||
/show\s+(?:the\s+)?(.+)/i,
|
||||
/tell\s+me/i
|
||||
]
|
||||
}
|
||||
/tell\s+me/i,
|
||||
],
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async classify(
|
||||
input: string,
|
||||
extractedEntities: { parameters: Record<string, any>; primary_target: string }
|
||||
extractedEntities: {
|
||||
parameters: Record<string, any>;
|
||||
primary_target: string;
|
||||
},
|
||||
): Promise<ClassifiedIntent> {
|
||||
let bestMatch: ClassifiedIntent = {
|
||||
action: '',
|
||||
target: '',
|
||||
action: "",
|
||||
target: "",
|
||||
confidence: 0,
|
||||
parameters: {},
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
};
|
||||
|
||||
for (const actionPattern of this.actionPatterns) {
|
||||
@@ -78,8 +71,12 @@ export class IntentClassifier {
|
||||
action: actionPattern.action,
|
||||
target: extractedEntities.primary_target,
|
||||
confidence,
|
||||
parameters: this.extractActionParameters(actionPattern, match, extractedEntities),
|
||||
raw_input: input
|
||||
parameters: this.extractActionParameters(
|
||||
actionPattern,
|
||||
match,
|
||||
extractedEntities,
|
||||
),
|
||||
raw_input: input,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -105,7 +102,7 @@ export class IntentClassifier {
|
||||
}
|
||||
|
||||
// Additional confidence for specific keywords
|
||||
const keywords = ['please', 'can you', 'would you'];
|
||||
const keywords = ["please", "can you", "would you"];
|
||||
for (const keyword of keywords) {
|
||||
if (input.toLowerCase().includes(keyword)) {
|
||||
confidence += 0.1;
|
||||
@@ -118,7 +115,10 @@ export class IntentClassifier {
|
||||
private extractActionParameters(
|
||||
actionPattern: ActionPattern,
|
||||
match: RegExpMatchArray,
|
||||
extractedEntities: { parameters: Record<string, any>; primary_target: string }
|
||||
extractedEntities: {
|
||||
parameters: Record<string, any>;
|
||||
primary_target: string;
|
||||
},
|
||||
): Record<string, any> {
|
||||
const parameters: Record<string, any> = {};
|
||||
|
||||
@@ -141,37 +141,40 @@ export class IntentClassifier {
|
||||
|
||||
private inferFromContext(
|
||||
input: string,
|
||||
extractedEntities: { parameters: Record<string, any>; primary_target: string }
|
||||
extractedEntities: {
|
||||
parameters: Record<string, any>;
|
||||
primary_target: string;
|
||||
},
|
||||
): ClassifiedIntent {
|
||||
// Default to 'set' action if parameters are present
|
||||
if (Object.keys(extractedEntities.parameters).length > 0) {
|
||||
return {
|
||||
action: 'set',
|
||||
action: "set",
|
||||
target: extractedEntities.primary_target,
|
||||
confidence: 0.5,
|
||||
parameters: extractedEntities.parameters,
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
};
|
||||
}
|
||||
|
||||
// Default to 'query' for question-like inputs
|
||||
if (input.match(/^(what|when|where|who|how|why)/i)) {
|
||||
return {
|
||||
action: 'query',
|
||||
target: extractedEntities.primary_target || 'system',
|
||||
action: "query",
|
||||
target: extractedEntities.primary_target || "system",
|
||||
confidence: 0.6,
|
||||
parameters: {},
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback with low confidence
|
||||
return {
|
||||
action: 'unknown',
|
||||
target: extractedEntities.primary_target || 'system',
|
||||
action: "unknown",
|
||||
target: extractedEntities.primary_target || "system",
|
||||
confidence: 0.3,
|
||||
parameters: {},
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
import { AIIntent, AIContext, AIConfidence, AIError } from '../types/index.js';
|
||||
import { EntityExtractor } from './entity-extractor.js';
|
||||
import { IntentClassifier } from './intent-classifier.js';
|
||||
import { ContextAnalyzer } from './context-analyzer.js';
|
||||
import { AIIntent, AIContext, AIConfidence, AIError } from "../types/index.js";
|
||||
import { EntityExtractor } from "./entity-extractor.js";
|
||||
import { IntentClassifier } from "./intent-classifier.js";
|
||||
import { ContextAnalyzer } from "./context-analyzer.js";
|
||||
|
||||
export class NLPProcessor {
|
||||
private entityExtractor: EntityExtractor;
|
||||
@@ -16,7 +16,7 @@ export class NLPProcessor {
|
||||
|
||||
async processCommand(
|
||||
input: string,
|
||||
context: AIContext
|
||||
context: AIContext,
|
||||
): Promise<{
|
||||
intent: AIIntent;
|
||||
confidence: AIConfidence;
|
||||
@@ -30,14 +30,21 @@ export class NLPProcessor {
|
||||
const intent = await this.intentClassifier.classify(input, entities);
|
||||
|
||||
// Analyze context relevance
|
||||
const contextRelevance = await this.contextAnalyzer.analyze(intent, context);
|
||||
const contextRelevance = await this.contextAnalyzer.analyze(
|
||||
intent,
|
||||
context,
|
||||
);
|
||||
|
||||
// Calculate confidence scores
|
||||
const confidence: AIConfidence = {
|
||||
overall: (intent.confidence + entities.confidence + contextRelevance.confidence) / 3,
|
||||
overall:
|
||||
(intent.confidence +
|
||||
entities.confidence +
|
||||
contextRelevance.confidence) /
|
||||
3,
|
||||
intent: intent.confidence,
|
||||
entities: entities.confidence,
|
||||
context: contextRelevance.confidence
|
||||
context: contextRelevance.confidence,
|
||||
};
|
||||
|
||||
// Create structured intent
|
||||
@@ -47,41 +54,42 @@ export class NLPProcessor {
|
||||
parameters: {
|
||||
...entities.parameters,
|
||||
...intent.parameters,
|
||||
context_parameters: contextRelevance.relevant_params
|
||||
context_parameters: contextRelevance.relevant_params,
|
||||
},
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
};
|
||||
|
||||
return {
|
||||
intent: structuredIntent,
|
||||
confidence
|
||||
confidence,
|
||||
};
|
||||
} catch (error: unknown) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
|
||||
const errorMessage =
|
||||
error instanceof Error ? error.message : "Unknown error occurred";
|
||||
return {
|
||||
intent: {
|
||||
action: 'error',
|
||||
target: 'system',
|
||||
action: "error",
|
||||
target: "system",
|
||||
parameters: {},
|
||||
raw_input: input
|
||||
raw_input: input,
|
||||
},
|
||||
confidence: {
|
||||
overall: 0,
|
||||
intent: 0,
|
||||
entities: 0,
|
||||
context: 0
|
||||
context: 0,
|
||||
},
|
||||
error: {
|
||||
code: 'NLP_PROCESSING_ERROR',
|
||||
code: "NLP_PROCESSING_ERROR",
|
||||
message: errorMessage,
|
||||
suggestion: 'Please try rephrasing your command',
|
||||
suggestion: "Please try rephrasing your command",
|
||||
recovery_options: [
|
||||
'Use simpler language',
|
||||
'Break down the command into smaller parts',
|
||||
'Specify the target device explicitly'
|
||||
"Use simpler language",
|
||||
"Break down the command into smaller parts",
|
||||
"Specify the target device explicitly",
|
||||
],
|
||||
context
|
||||
}
|
||||
context,
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -89,7 +97,7 @@ export class NLPProcessor {
|
||||
async validateIntent(
|
||||
intent: AIIntent,
|
||||
confidence: AIConfidence,
|
||||
threshold = 0.7
|
||||
threshold = 0.7,
|
||||
): Promise<boolean> {
|
||||
return (
|
||||
confidence.overall >= threshold &&
|
||||
@@ -99,31 +107,28 @@ export class NLPProcessor {
|
||||
);
|
||||
}
|
||||
|
||||
async suggestCorrections(
|
||||
input: string,
|
||||
error: AIError
|
||||
): Promise<string[]> {
|
||||
async suggestCorrections(input: string, error: AIError): Promise<string[]> {
|
||||
// Implement correction suggestions based on the error
|
||||
const suggestions: string[] = [];
|
||||
|
||||
if (error.code === 'ENTITY_NOT_FOUND') {
|
||||
if (error.code === "ENTITY_NOT_FOUND") {
|
||||
suggestions.push(
|
||||
'Try specifying the device name more clearly',
|
||||
'Use the exact device name from your Home Assistant setup'
|
||||
"Try specifying the device name more clearly",
|
||||
"Use the exact device name from your Home Assistant setup",
|
||||
);
|
||||
}
|
||||
|
||||
if (error.code === 'AMBIGUOUS_INTENT') {
|
||||
if (error.code === "AMBIGUOUS_INTENT") {
|
||||
suggestions.push(
|
||||
'Please specify what you want to do with the device',
|
||||
'Use action words like "turn on", "set", "adjust"'
|
||||
"Please specify what you want to do with the device",
|
||||
'Use action words like "turn on", "set", "adjust"',
|
||||
);
|
||||
}
|
||||
|
||||
if (error.code === 'CONTEXT_MISMATCH') {
|
||||
if (error.code === "CONTEXT_MISMATCH") {
|
||||
suggestions.push(
|
||||
'Specify the location if referring to a device',
|
||||
'Clarify which device you mean in the current context'
|
||||
"Specify the location if referring to a device",
|
||||
"Clarify which device you mean in the current context",
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { AIModel } from '../types/index.js';
|
||||
import { AIModel } from "../types/index.js";
|
||||
|
||||
interface PromptTemplate {
|
||||
system: string;
|
||||
@@ -33,13 +33,15 @@ Current context: {context}`,
|
||||
examples: [
|
||||
{
|
||||
user: "Turn on the living room lights",
|
||||
assistant: "I'll turn on the lights in the living room. Would you like me to set a specific brightness level?"
|
||||
assistant:
|
||||
"I'll turn on the lights in the living room. Would you like me to set a specific brightness level?",
|
||||
},
|
||||
{
|
||||
user: "Set the temperature to 72 degrees",
|
||||
assistant: "I'll set the temperature to 72°F. I'll monitor the temperature and let you know when it reaches the target."
|
||||
}
|
||||
]
|
||||
assistant:
|
||||
"I'll set the temperature to 72°F. I'll monitor the temperature and let you know when it reaches the target.",
|
||||
},
|
||||
],
|
||||
},
|
||||
[AIModel.GPT4]: {
|
||||
system: `You are a home automation assistant powered by GPT-4.
|
||||
@@ -52,13 +54,15 @@ Context: {context}`,
|
||||
examples: [
|
||||
{
|
||||
user: "Dim the bedroom lights to 50%",
|
||||
assistant: "Setting bedroom light brightness to 50%. The change has been applied successfully."
|
||||
assistant:
|
||||
"Setting bedroom light brightness to 50%. The change has been applied successfully.",
|
||||
},
|
||||
{
|
||||
user: "Start the evening routine",
|
||||
assistant: "Initiating evening routine: dimming lights, adjusting temperature, and enabling security system."
|
||||
}
|
||||
]
|
||||
assistant:
|
||||
"Initiating evening routine: dimming lights, adjusting temperature, and enabling security system.",
|
||||
},
|
||||
],
|
||||
},
|
||||
[AIModel.CUSTOM]: {
|
||||
system: `Custom home automation assistant configuration.
|
||||
@@ -73,14 +77,16 @@ Current context: {context}`,
|
||||
examples: [
|
||||
{
|
||||
user: "Make it cooler in here",
|
||||
assistant: "Based on your preferences, I'll lower the temperature by 2 degrees. Current temperature is 74°F, adjusting to 72°F."
|
||||
assistant:
|
||||
"Based on your preferences, I'll lower the temperature by 2 degrees. Current temperature is 74°F, adjusting to 72°F.",
|
||||
},
|
||||
{
|
||||
user: "Set up movie mode",
|
||||
assistant: "Activating movie mode: dimming lights to 20%, closing blinds, setting TV input to HDMI 1, and adjusting sound system."
|
||||
}
|
||||
]
|
||||
}
|
||||
assistant:
|
||||
"Activating movie mode: dimming lights to 20%, closing blinds, setting TV input to HDMI 1, and adjusting sound system.",
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -95,7 +101,7 @@ Current context: {context}`,
|
||||
// Replace variables in the prompt
|
||||
for (const [key, value] of Object.entries(variables)) {
|
||||
const placeholder = `{${key}}`;
|
||||
if (typeof value === 'object') {
|
||||
if (typeof value === "object") {
|
||||
prompt = prompt.replace(placeholder, JSON.stringify(value));
|
||||
} else {
|
||||
prompt = prompt.replace(placeholder, String(value));
|
||||
@@ -115,7 +121,7 @@ Current context: {context}`,
|
||||
|
||||
addExample(
|
||||
model: AIModel,
|
||||
example: { user: string; assistant: string }
|
||||
example: { user: string; assistant: string },
|
||||
): void {
|
||||
this.templates[model].examples.push(example);
|
||||
}
|
||||
@@ -124,10 +130,7 @@ Current context: {context}`,
|
||||
this.templates[model].system = newPrompt;
|
||||
}
|
||||
|
||||
createCustomTemplate(
|
||||
model: AIModel.CUSTOM,
|
||||
template: PromptTemplate
|
||||
): void {
|
||||
createCustomTemplate(model: AIModel.CUSTOM, template: PromptTemplate): void {
|
||||
this.templates[model] = template;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { z } from 'zod';
|
||||
import { z } from "zod";
|
||||
|
||||
// AI Model Types
|
||||
export enum AIModel {
|
||||
CLAUDE = 'claude',
|
||||
GPT4 = 'gpt4',
|
||||
CUSTOM = 'custom'
|
||||
CLAUDE = "claude",
|
||||
GPT4 = "gpt4",
|
||||
CUSTOM = "custom",
|
||||
}
|
||||
|
||||
// AI Confidence Level
|
||||
@@ -61,10 +61,13 @@ export interface AIRateLimit {
|
||||
requests_per_minute: number;
|
||||
requests_per_hour: number;
|
||||
concurrent_requests: number;
|
||||
model_specific_limits: Record<AIModel, {
|
||||
model_specific_limits: Record<
|
||||
AIModel,
|
||||
{
|
||||
requests_per_minute: number;
|
||||
requests_per_hour: number;
|
||||
}>;
|
||||
}
|
||||
>;
|
||||
}
|
||||
|
||||
// Zod Schemas
|
||||
@@ -72,14 +75,14 @@ export const AIConfidenceSchema = z.object({
|
||||
overall: z.number().min(0).max(1),
|
||||
intent: z.number().min(0).max(1),
|
||||
entities: z.number().min(0).max(1),
|
||||
context: z.number().min(0).max(1)
|
||||
context: z.number().min(0).max(1),
|
||||
});
|
||||
|
||||
export const AIIntentSchema = z.object({
|
||||
action: z.string(),
|
||||
target: z.string(),
|
||||
parameters: z.record(z.any()),
|
||||
raw_input: z.string()
|
||||
raw_input: z.string(),
|
||||
});
|
||||
|
||||
export const AIContextSchema = z.object({
|
||||
@@ -88,7 +91,7 @@ export const AIContextSchema = z.object({
|
||||
timestamp: z.string(),
|
||||
location: z.string(),
|
||||
previous_actions: z.array(AIIntentSchema),
|
||||
environment_state: z.record(z.any())
|
||||
environment_state: z.record(z.any()),
|
||||
});
|
||||
|
||||
export const AIResponseSchema = z.object({
|
||||
@@ -97,11 +100,11 @@ export const AIResponseSchema = z.object({
|
||||
success: z.boolean(),
|
||||
action_taken: z.string(),
|
||||
entities_affected: z.array(z.string()),
|
||||
state_changes: z.record(z.any())
|
||||
state_changes: z.record(z.any()),
|
||||
}),
|
||||
next_suggestions: z.array(z.string()),
|
||||
confidence: AIConfidenceSchema,
|
||||
context: AIContextSchema
|
||||
context: AIContextSchema,
|
||||
});
|
||||
|
||||
export const AIErrorSchema = z.object({
|
||||
@@ -109,15 +112,17 @@ export const AIErrorSchema = z.object({
|
||||
message: z.string(),
|
||||
suggestion: z.string(),
|
||||
recovery_options: z.array(z.string()),
|
||||
context: AIContextSchema
|
||||
context: AIContextSchema,
|
||||
});
|
||||
|
||||
export const AIRateLimitSchema = z.object({
|
||||
requests_per_minute: z.number(),
|
||||
requests_per_hour: z.number(),
|
||||
concurrent_requests: z.number(),
|
||||
model_specific_limits: z.record(z.object({
|
||||
model_specific_limits: z.record(
|
||||
z.object({
|
||||
requests_per_minute: z.number(),
|
||||
requests_per_hour: z.number()
|
||||
}))
|
||||
requests_per_hour: z.number(),
|
||||
}),
|
||||
),
|
||||
});
|
||||
@@ -1,21 +1,21 @@
|
||||
import { Router } from 'express';
|
||||
import { MCP_SCHEMA } from '../mcp/schema.js';
|
||||
import { middleware } from '../middleware/index.js';
|
||||
import { sseManager } from '../sse/index.js';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { TokenManager } from '../security/index.js';
|
||||
import { tools } from '../tools/index.js';
|
||||
import { Tool } from '../interfaces/index.js';
|
||||
import { Router } from "express";
|
||||
import { MCP_SCHEMA } from "../mcp/schema.js";
|
||||
import { middleware } from "../middleware/index.js";
|
||||
import { sseManager } from "../sse/index.js";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { TokenManager } from "../security/index.js";
|
||||
import { tools } from "../tools/index.js";
|
||||
import { Tool } from "../interfaces/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// MCP schema endpoint - no auth required as it's just the schema
|
||||
router.get('/mcp', (_req, res) => {
|
||||
router.get("/mcp", (_req, res) => {
|
||||
res.json(MCP_SCHEMA);
|
||||
});
|
||||
|
||||
// MCP execute endpoint - requires authentication
|
||||
router.post('/mcp/execute', middleware.authenticate, async (req, res) => {
|
||||
router.post("/mcp/execute", middleware.authenticate, async (req, res) => {
|
||||
try {
|
||||
const { tool: toolName, parameters } = req.body;
|
||||
|
||||
@@ -24,7 +24,7 @@ router.post('/mcp/execute', middleware.authenticate, async (req, res) => {
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: `Tool '${toolName}' not found`
|
||||
message: `Tool '${toolName}' not found`,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -34,67 +34,72 @@ router.post('/mcp/execute', middleware.authenticate, async (req, res) => {
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Health check endpoint
|
||||
router.get('/health', (_req, res) => {
|
||||
router.get("/health", (_req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
status: "ok",
|
||||
timestamp: new Date().toISOString(),
|
||||
version: '0.1.0'
|
||||
version: "0.1.0",
|
||||
});
|
||||
});
|
||||
|
||||
// List devices endpoint
|
||||
router.get('/list_devices', middleware.authenticate, async (req, res) => {
|
||||
router.get("/list_devices", middleware.authenticate, async (req, res) => {
|
||||
try {
|
||||
const tool = tools.find((t: Tool) => t.name === 'list_devices');
|
||||
const tool = tools.find((t: Tool) => t.name === "list_devices");
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: 'Tool not found'
|
||||
message: "Tool not found",
|
||||
});
|
||||
}
|
||||
|
||||
const result = await tool.execute({ token: req.headers.authorization?.replace('Bearer ', '') });
|
||||
const result = await tool.execute({
|
||||
token: req.headers.authorization?.replace("Bearer ", ""),
|
||||
});
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Device control endpoint
|
||||
router.post('/control', middleware.authenticate, async (req, res) => {
|
||||
router.post("/control", middleware.authenticate, async (req, res) => {
|
||||
try {
|
||||
const tool = tools.find((t: Tool) => t.name === 'control');
|
||||
const tool = tools.find((t: Tool) => t.name === "control");
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: 'Tool not found'
|
||||
message: "Tool not found",
|
||||
});
|
||||
}
|
||||
|
||||
const result = await tool.execute({
|
||||
...req.body,
|
||||
token: req.headers.authorization?.replace('Bearer ', '')
|
||||
token: req.headers.authorization?.replace("Bearer ", ""),
|
||||
});
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// SSE endpoints
|
||||
router.get('/subscribe_events', middleware.wsRateLimiter, (req, res) => {
|
||||
router.get("/subscribe_events", middleware.wsRateLimiter, (req, res) => {
|
||||
try {
|
||||
// Get token from query parameter
|
||||
const token = req.query.token?.toString();
|
||||
@@ -102,48 +107,54 @@ router.get('/subscribe_events', middleware.wsRateLimiter, (req, res) => {
|
||||
if (!token || !TokenManager.validateToken(token)) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized - Invalid token",
|
||||
});
|
||||
}
|
||||
|
||||
// Set SSE headers
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Connection': 'keep-alive',
|
||||
'Access-Control-Allow-Origin': '*'
|
||||
"Content-Type": "text/event-stream",
|
||||
"Cache-Control": "no-cache",
|
||||
Connection: "keep-alive",
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
});
|
||||
|
||||
// Send initial connection message
|
||||
res.write(`data: ${JSON.stringify({
|
||||
type: 'connection',
|
||||
status: 'connected',
|
||||
timestamp: new Date().toISOString()
|
||||
})}\n\n`);
|
||||
res.write(
|
||||
`data: ${JSON.stringify({
|
||||
type: "connection",
|
||||
status: "connected",
|
||||
timestamp: new Date().toISOString(),
|
||||
})}\n\n`,
|
||||
);
|
||||
|
||||
const clientId = uuidv4();
|
||||
const client = {
|
||||
id: clientId,
|
||||
send: (data: string) => {
|
||||
res.write(`data: ${data}\n\n`);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Add client to SSE manager
|
||||
const sseClient = sseManager.addClient(client, token);
|
||||
if (!sseClient || !sseClient.authenticated) {
|
||||
res.write(`data: ${JSON.stringify({
|
||||
type: 'error',
|
||||
message: sseClient ? 'Authentication failed' : 'Maximum client limit reached',
|
||||
timestamp: new Date().toISOString()
|
||||
})}\n\n`);
|
||||
res.write(
|
||||
`data: ${JSON.stringify({
|
||||
type: "error",
|
||||
message: sseClient
|
||||
? "Authentication failed"
|
||||
: "Maximum client limit reached",
|
||||
timestamp: new Date().toISOString(),
|
||||
})}\n\n`,
|
||||
);
|
||||
return res.end();
|
||||
}
|
||||
|
||||
// Subscribe to events if specified
|
||||
const events = req.query.events?.toString().split(',').filter(Boolean);
|
||||
const events = req.query.events?.toString().split(",").filter(Boolean);
|
||||
if (events?.length) {
|
||||
events.forEach(event => sseManager.subscribeToEvent(clientId, event));
|
||||
events.forEach((event) => sseManager.subscribeToEvent(clientId, event));
|
||||
}
|
||||
|
||||
// Subscribe to entity if specified
|
||||
@@ -159,14 +170,14 @@ router.get('/subscribe_events', middleware.wsRateLimiter, (req, res) => {
|
||||
}
|
||||
|
||||
// Handle client disconnect
|
||||
req.on('close', () => {
|
||||
req.on("close", () => {
|
||||
sseManager.removeClient(clientId);
|
||||
});
|
||||
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
@@ -185,19 +196,20 @@ router.get('/subscribe_events', middleware.wsRateLimiter, (req, res) => {
|
||||
* - total_entities_tracked: Number of entities being tracked
|
||||
* - subscriptions: Lists of entity, event, and domain subscriptions
|
||||
*/
|
||||
router.get('/get_sse_stats', middleware.authenticate, (_req, res) => {
|
||||
router.get("/get_sse_stats", middleware.authenticate, (_req, res) => {
|
||||
try {
|
||||
const stats = sseManager.getStatistics();
|
||||
res.json({
|
||||
success: true,
|
||||
timestamp: new Date().toISOString(),
|
||||
data: stats
|
||||
data: stats,
|
||||
});
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
timestamp: new Date().toISOString()
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
27
src/commands.ts
Normal file
27
src/commands.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
// Common commands that work with most entities
|
||||
export const commonCommands = ["turn_on", "turn_off", "toggle"] as const;
|
||||
|
||||
// Commands specific to cover entities
|
||||
export const coverCommands = [
|
||||
...commonCommands,
|
||||
"open",
|
||||
"close",
|
||||
"stop",
|
||||
"set_position",
|
||||
"set_tilt_position",
|
||||
] as const;
|
||||
|
||||
// Commands specific to climate entities
|
||||
export const climateCommands = [
|
||||
...commonCommands,
|
||||
"set_temperature",
|
||||
"set_hvac_mode",
|
||||
"set_fan_mode",
|
||||
"set_humidity",
|
||||
] as const;
|
||||
|
||||
// Types for command validation
|
||||
export type CommonCommand = (typeof commonCommands)[number];
|
||||
export type CoverCommand = (typeof coverCommands)[number];
|
||||
export type ClimateCommand = (typeof climateCommands)[number];
|
||||
export type Command = CommonCommand | CoverCommand | ClimateCommand;
|
||||
162
src/config/__tests__/test.config.ts
Normal file
162
src/config/__tests__/test.config.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// Test configuration schema
|
||||
const testConfigSchema = z.object({
|
||||
// Test Environment
|
||||
TEST_PORT: z.number().default(3001),
|
||||
TEST_HOST: z.string().default("http://localhost"),
|
||||
TEST_WEBSOCKET_PORT: z.number().default(3002),
|
||||
|
||||
// Mock Authentication
|
||||
TEST_JWT_SECRET: z
|
||||
.string()
|
||||
.default("test_jwt_secret_key_that_is_at_least_32_chars"),
|
||||
TEST_TOKEN: z.string().default("test_token_that_is_at_least_32_chars_long"),
|
||||
TEST_INVALID_TOKEN: z.string().default("invalid_token"),
|
||||
|
||||
// Mock Client Settings
|
||||
TEST_CLIENT_IP: z.string().default("127.0.0.1"),
|
||||
TEST_MAX_CLIENTS: z.number().default(10),
|
||||
TEST_PING_INTERVAL: z.number().default(100),
|
||||
TEST_CLEANUP_INTERVAL: z.number().default(200),
|
||||
TEST_MAX_CONNECTION_AGE: z.number().default(1000),
|
||||
|
||||
// Mock Rate Limiting
|
||||
TEST_RATE_LIMIT_WINDOW: z.number().default(60000), // 1 minute
|
||||
TEST_RATE_LIMIT_MAX_REQUESTS: z.number().default(100),
|
||||
TEST_RATE_LIMIT_WEBSOCKET: z.number().default(1000),
|
||||
|
||||
// Mock Events
|
||||
TEST_EVENT_TYPES: z
|
||||
.array(z.string())
|
||||
.default([
|
||||
"state_changed",
|
||||
"automation_triggered",
|
||||
"script_executed",
|
||||
"service_called",
|
||||
]),
|
||||
|
||||
// Mock Entities
|
||||
TEST_ENTITIES: z
|
||||
.array(
|
||||
z.object({
|
||||
entity_id: z.string(),
|
||||
state: z.string(),
|
||||
attributes: z.record(z.any()),
|
||||
last_changed: z.string(),
|
||||
last_updated: z.string(),
|
||||
}),
|
||||
)
|
||||
.default([
|
||||
{
|
||||
entity_id: "light.test_light",
|
||||
state: "on",
|
||||
attributes: {
|
||||
brightness: 255,
|
||||
color_temp: 400,
|
||||
},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
},
|
||||
{
|
||||
entity_id: "switch.test_switch",
|
||||
state: "off",
|
||||
attributes: {},
|
||||
last_changed: new Date().toISOString(),
|
||||
last_updated: new Date().toISOString(),
|
||||
},
|
||||
]),
|
||||
|
||||
// Mock Services
|
||||
TEST_SERVICES: z
|
||||
.array(
|
||||
z.object({
|
||||
domain: z.string(),
|
||||
service: z.string(),
|
||||
data: z.record(z.any()),
|
||||
}),
|
||||
)
|
||||
.default([
|
||||
{
|
||||
domain: "light",
|
||||
service: "turn_on",
|
||||
data: {
|
||||
entity_id: "light.test_light",
|
||||
brightness: 255,
|
||||
},
|
||||
},
|
||||
{
|
||||
domain: "switch",
|
||||
service: "turn_off",
|
||||
data: {
|
||||
entity_id: "switch.test_switch",
|
||||
},
|
||||
},
|
||||
]),
|
||||
|
||||
// Mock Error Scenarios
|
||||
TEST_ERROR_SCENARIOS: z
|
||||
.array(
|
||||
z.object({
|
||||
type: z.string(),
|
||||
message: z.string(),
|
||||
code: z.number(),
|
||||
}),
|
||||
)
|
||||
.default([
|
||||
{
|
||||
type: "authentication_error",
|
||||
message: "Invalid token",
|
||||
code: 401,
|
||||
},
|
||||
{
|
||||
type: "rate_limit_error",
|
||||
message: "Too many requests",
|
||||
code: 429,
|
||||
},
|
||||
{
|
||||
type: "validation_error",
|
||||
message: "Invalid request body",
|
||||
code: 400,
|
||||
},
|
||||
]),
|
||||
});
|
||||
|
||||
// Parse environment variables or use defaults
|
||||
const parseTestConfig = () => {
|
||||
const config = {
|
||||
TEST_PORT: parseInt(process.env.TEST_PORT || "3001"),
|
||||
TEST_HOST: process.env.TEST_HOST || "http://localhost",
|
||||
TEST_WEBSOCKET_PORT: parseInt(process.env.TEST_WEBSOCKET_PORT || "3002"),
|
||||
TEST_JWT_SECRET:
|
||||
process.env.TEST_JWT_SECRET ||
|
||||
"test_jwt_secret_key_that_is_at_least_32_chars",
|
||||
TEST_TOKEN:
|
||||
process.env.TEST_TOKEN || "test_token_that_is_at_least_32_chars_long",
|
||||
TEST_INVALID_TOKEN: process.env.TEST_INVALID_TOKEN || "invalid_token",
|
||||
TEST_CLIENT_IP: process.env.TEST_CLIENT_IP || "127.0.0.1",
|
||||
TEST_MAX_CLIENTS: parseInt(process.env.TEST_MAX_CLIENTS || "10"),
|
||||
TEST_PING_INTERVAL: parseInt(process.env.TEST_PING_INTERVAL || "100"),
|
||||
TEST_CLEANUP_INTERVAL: parseInt(process.env.TEST_CLEANUP_INTERVAL || "200"),
|
||||
TEST_MAX_CONNECTION_AGE: parseInt(
|
||||
process.env.TEST_MAX_CONNECTION_AGE || "1000",
|
||||
),
|
||||
TEST_RATE_LIMIT_WINDOW: parseInt(
|
||||
process.env.TEST_RATE_LIMIT_WINDOW || "60000",
|
||||
),
|
||||
TEST_RATE_LIMIT_MAX_REQUESTS: parseInt(
|
||||
process.env.TEST_RATE_LIMIT_MAX_REQUESTS || "100",
|
||||
),
|
||||
TEST_RATE_LIMIT_WEBSOCKET: parseInt(
|
||||
process.env.TEST_RATE_LIMIT_WEBSOCKET || "1000",
|
||||
),
|
||||
};
|
||||
|
||||
return testConfigSchema.parse(config);
|
||||
};
|
||||
|
||||
// Export the validated test configuration
|
||||
export const TEST_CONFIG = parseTestConfig();
|
||||
|
||||
// Export types
|
||||
export type TestConfig = z.infer<typeof testConfigSchema>;
|
||||
@@ -1,5 +1,6 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { config } from "dotenv";
|
||||
import { resolve } from "path";
|
||||
import { z } from "zod";
|
||||
|
||||
/**
|
||||
* Load environment variables based on NODE_ENV
|
||||
@@ -7,11 +8,12 @@ import { resolve } from 'path';
|
||||
* Test: .env.test
|
||||
* Production: .env
|
||||
*/
|
||||
const envFile = process.env.NODE_ENV === 'production'
|
||||
? '.env'
|
||||
: process.env.NODE_ENV === 'test'
|
||||
? '.env.test'
|
||||
: '.env.development';
|
||||
const envFile =
|
||||
process.env.NODE_ENV === "production"
|
||||
? ".env"
|
||||
: process.env.NODE_ENV === "test"
|
||||
? ".env.test"
|
||||
: ".env.development";
|
||||
|
||||
console.log(`Loading environment from ${envFile}`);
|
||||
config({ path: resolve(process.cwd(), envFile) });
|
||||
@@ -20,59 +22,76 @@ config({ path: resolve(process.cwd(), envFile) });
|
||||
* Application configuration object
|
||||
* Contains all configuration settings for the application
|
||||
*/
|
||||
export const APP_CONFIG = {
|
||||
export const AppConfigSchema = z.object({
|
||||
/** Server Configuration */
|
||||
PORT: process.env.PORT || 3000,
|
||||
NODE_ENV: process.env.NODE_ENV || 'development',
|
||||
PORT: z.coerce.number().default(4000),
|
||||
NODE_ENV: z
|
||||
.enum(["development", "production", "test"])
|
||||
.default("development"),
|
||||
|
||||
/** Home Assistant Configuration */
|
||||
HASS_HOST: process.env.HASS_HOST || 'http://192.168.178.63:8123',
|
||||
HASS_TOKEN: process.env.HASS_TOKEN,
|
||||
HASS_HOST: z.string().default("http://192.168.178.63:8123"),
|
||||
HASS_TOKEN: z.string().optional(),
|
||||
|
||||
/** Speech Features Configuration */
|
||||
SPEECH: z.object({
|
||||
ENABLED: z.boolean().default(false),
|
||||
WAKE_WORD_ENABLED: z.boolean().default(false),
|
||||
SPEECH_TO_TEXT_ENABLED: z.boolean().default(false),
|
||||
WHISPER_MODEL_PATH: z.string().default("/models"),
|
||||
WHISPER_MODEL_TYPE: z.string().default("base"),
|
||||
}).default({
|
||||
ENABLED: false,
|
||||
WAKE_WORD_ENABLED: false,
|
||||
SPEECH_TO_TEXT_ENABLED: false,
|
||||
WHISPER_MODEL_PATH: "/models",
|
||||
WHISPER_MODEL_TYPE: "base",
|
||||
}),
|
||||
|
||||
/** Security Configuration */
|
||||
JWT_SECRET: process.env.JWT_SECRET || 'your-secret-key',
|
||||
RATE_LIMIT: {
|
||||
JWT_SECRET: z.string().default("your-secret-key"),
|
||||
RATE_LIMIT: z.object({
|
||||
/** Time window for rate limiting in milliseconds */
|
||||
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||
windowMs: z.number().default(15 * 60 * 1000), // 15 minutes
|
||||
/** Maximum number of requests per window */
|
||||
max: 100 // limit each IP to 100 requests per windowMs
|
||||
},
|
||||
max: z.number().default(100), // limit each IP to 100 requests per windowMs
|
||||
}),
|
||||
|
||||
/** Server-Sent Events Configuration */
|
||||
SSE: {
|
||||
SSE: z.object({
|
||||
/** Maximum number of concurrent SSE clients */
|
||||
MAX_CLIENTS: 1000,
|
||||
MAX_CLIENTS: z.number().default(1000),
|
||||
/** Ping interval in milliseconds to keep connections alive */
|
||||
PING_INTERVAL: 30000 // 30 seconds
|
||||
},
|
||||
PING_INTERVAL: z.number().default(30000), // 30 seconds
|
||||
}),
|
||||
|
||||
/** Logging Configuration */
|
||||
LOGGING: {
|
||||
LOGGING: z.object({
|
||||
/** Log level (error, warn, info, http, debug) */
|
||||
LEVEL: process.env.LOG_LEVEL || 'info',
|
||||
LEVEL: z.enum(["error", "warn", "info", "debug", "trace"]).default("info"),
|
||||
/** Directory for log files */
|
||||
DIR: process.env.LOG_DIR || 'logs',
|
||||
DIR: z.string().default("logs"),
|
||||
/** Maximum log file size before rotation */
|
||||
MAX_SIZE: process.env.LOG_MAX_SIZE || '20m',
|
||||
MAX_SIZE: z.string().default("20m"),
|
||||
/** Maximum number of days to keep log files */
|
||||
MAX_DAYS: process.env.LOG_MAX_DAYS || '14d',
|
||||
MAX_DAYS: z.string().default("14d"),
|
||||
/** Whether to compress rotated logs */
|
||||
COMPRESS: process.env.LOG_COMPRESS === 'true',
|
||||
COMPRESS: z.boolean().default(false),
|
||||
/** Format for timestamps in logs */
|
||||
TIMESTAMP_FORMAT: 'YYYY-MM-DD HH:mm:ss:ms',
|
||||
TIMESTAMP_FORMAT: z.string().default("YYYY-MM-DD HH:mm:ss:ms"),
|
||||
/** Whether to include request logging */
|
||||
LOG_REQUESTS: process.env.LOG_REQUESTS === 'true',
|
||||
},
|
||||
LOG_REQUESTS: z.boolean().default(false),
|
||||
}),
|
||||
|
||||
/** Application Version */
|
||||
VERSION: '0.1.0'
|
||||
} as const;
|
||||
VERSION: z.string().default("0.1.0"),
|
||||
});
|
||||
|
||||
/** Type definition for the configuration object */
|
||||
export type AppConfig = typeof APP_CONFIG;
|
||||
export type AppConfig = z.infer<typeof AppConfigSchema>;
|
||||
|
||||
/** Required environment variables that must be set */
|
||||
const requiredEnvVars = ['HASS_TOKEN'] as const;
|
||||
const requiredEnvVars = ["HASS_TOKEN"] as const;
|
||||
|
||||
/**
|
||||
* Validate that all required environment variables are set
|
||||
@@ -83,3 +102,37 @@ for (const envVar of requiredEnvVars) {
|
||||
throw new Error(`Missing required environment variable: ${envVar}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Load and validate configuration
|
||||
export const APP_CONFIG = AppConfigSchema.parse({
|
||||
PORT: process.env.PORT || 4000,
|
||||
NODE_ENV: process.env.NODE_ENV || "development",
|
||||
HASS_HOST: process.env.HASS_HOST || "http://192.168.178.63:8123",
|
||||
HASS_TOKEN: process.env.HASS_TOKEN,
|
||||
JWT_SECRET: process.env.JWT_SECRET || "your-secret-key",
|
||||
RATE_LIMIT: {
|
||||
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||
max: 100, // limit each IP to 100 requests per windowMs
|
||||
},
|
||||
SSE: {
|
||||
MAX_CLIENTS: 1000,
|
||||
PING_INTERVAL: 30000, // 30 seconds
|
||||
},
|
||||
LOGGING: {
|
||||
LEVEL: process.env.LOG_LEVEL || "info",
|
||||
DIR: process.env.LOG_DIR || "logs",
|
||||
MAX_SIZE: process.env.LOG_MAX_SIZE || "20m",
|
||||
MAX_DAYS: process.env.LOG_MAX_DAYS || "14d",
|
||||
COMPRESS: process.env.LOG_COMPRESS === "true",
|
||||
TIMESTAMP_FORMAT: "YYYY-MM-DD HH:mm:ss:ms",
|
||||
LOG_REQUESTS: process.env.LOG_REQUESTS === "true",
|
||||
},
|
||||
VERSION: "0.1.0",
|
||||
SPEECH: {
|
||||
ENABLED: process.env.ENABLE_SPEECH_FEATURES === "true",
|
||||
WAKE_WORD_ENABLED: process.env.ENABLE_WAKE_WORD === "true",
|
||||
SPEECH_TO_TEXT_ENABLED: process.env.ENABLE_SPEECH_TO_TEXT === "true",
|
||||
WHISPER_MODEL_PATH: process.env.WHISPER_MODEL_PATH || "/models",
|
||||
WHISPER_MODEL_TYPE: process.env.WHISPER_MODEL_TYPE || "base",
|
||||
},
|
||||
});
|
||||
|
||||
35
src/config/boilerplate.config.ts
Normal file
35
src/config/boilerplate.config.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
export const BOILERPLATE_CONFIG = {
|
||||
configuration: {
|
||||
LOG_LEVEL: {
|
||||
type: "string" as const,
|
||||
default: "debug",
|
||||
description: "Logging level",
|
||||
enum: ["error", "warn", "info", "debug", "trace"],
|
||||
},
|
||||
CACHE_DIRECTORY: {
|
||||
type: "string" as const,
|
||||
default: ".cache",
|
||||
description: "Directory for cache files",
|
||||
},
|
||||
CONFIG_DIRECTORY: {
|
||||
type: "string" as const,
|
||||
default: ".config",
|
||||
description: "Directory for configuration files",
|
||||
},
|
||||
DATA_DIRECTORY: {
|
||||
type: "string" as const,
|
||||
default: ".data",
|
||||
description: "Directory for data files",
|
||||
},
|
||||
},
|
||||
internal: {
|
||||
boilerplate: {
|
||||
configuration: {
|
||||
LOG_LEVEL: "debug",
|
||||
CACHE_DIRECTORY: ".cache",
|
||||
CONFIG_DIRECTORY: ".config",
|
||||
DATA_DIRECTORY: ".data",
|
||||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
@@ -1,11 +1,50 @@
|
||||
import dotenv from 'dotenv';
|
||||
import { config } from "dotenv";
|
||||
import { resolve } from "path";
|
||||
|
||||
// Load environment variables
|
||||
dotenv.config();
|
||||
// Load environment variables based on NODE_ENV
|
||||
const envFile =
|
||||
process.env.NODE_ENV === "production"
|
||||
? ".env"
|
||||
: process.env.NODE_ENV === "test"
|
||||
? ".env.test"
|
||||
: ".env.development";
|
||||
|
||||
config({ path: resolve(process.cwd(), envFile) });
|
||||
|
||||
export const HASS_CONFIG = {
|
||||
BASE_URL: process.env.HASS_HOST || 'http://homeassistant.local:8123',
|
||||
TOKEN: process.env.HASS_TOKEN || '',
|
||||
SOCKET_URL: process.env.HASS_SOCKET_URL || '',
|
||||
SOCKET_TOKEN: process.env.HASS_TOKEN || '',
|
||||
// Base configuration
|
||||
BASE_URL: process.env.HASS_HOST || "http://localhost:8123",
|
||||
TOKEN: process.env.HASS_TOKEN || "",
|
||||
SOCKET_URL: process.env.HASS_WS_URL || "ws://localhost:8123/api/websocket",
|
||||
SOCKET_TOKEN: process.env.HASS_TOKEN || "",
|
||||
|
||||
// Boilerplate configuration
|
||||
BOILERPLATE: {
|
||||
CACHE_DIRECTORY: ".cache",
|
||||
CONFIG_DIRECTORY: ".config",
|
||||
DATA_DIRECTORY: ".data",
|
||||
LOG_LEVEL: "debug",
|
||||
ENVIRONMENT: process.env.NODE_ENV || "development",
|
||||
},
|
||||
|
||||
// Application configuration
|
||||
APP_NAME: "homeassistant-mcp",
|
||||
APP_VERSION: "1.0.0",
|
||||
|
||||
// API configuration
|
||||
API_VERSION: "1.0.0",
|
||||
API_PREFIX: "/api",
|
||||
|
||||
// Security configuration
|
||||
RATE_LIMIT: {
|
||||
WINDOW_MS: 15 * 60 * 1000, // 15 minutes
|
||||
MAX_REQUESTS: 100,
|
||||
},
|
||||
|
||||
// WebSocket configuration
|
||||
WS_CONFIG: {
|
||||
AUTO_RECONNECT: true,
|
||||
MAX_RECONNECT_ATTEMPTS: 3,
|
||||
RECONNECT_DELAY: 1000,
|
||||
},
|
||||
};
|
||||
@@ -1,76 +1,85 @@
|
||||
import { config } from 'dotenv';
|
||||
import { resolve } from 'path';
|
||||
import { config } from "dotenv";
|
||||
import { resolve } from "path";
|
||||
|
||||
// Load environment variables based on NODE_ENV
|
||||
const envFile = process.env.NODE_ENV === 'production'
|
||||
? '.env'
|
||||
: process.env.NODE_ENV === 'test'
|
||||
? '.env.test'
|
||||
: '.env.development';
|
||||
const envFile =
|
||||
process.env.NODE_ENV === "production"
|
||||
? ".env"
|
||||
: process.env.NODE_ENV === "test"
|
||||
? ".env.test"
|
||||
: ".env.development";
|
||||
|
||||
console.log(`Loading environment from ${envFile}`);
|
||||
config({ path: resolve(process.cwd(), envFile) });
|
||||
|
||||
// Home Assistant Configuration
|
||||
export const HASS_CONFIG = {
|
||||
HOST: process.env.HASS_HOST || 'http://homeassistant.local:8123',
|
||||
HOST: process.env.HASS_HOST || "http://homeassistant.local:8123",
|
||||
TOKEN: process.env.HASS_TOKEN,
|
||||
SOCKET_URL: process.env.HASS_SOCKET_URL || 'ws://homeassistant.local:8123/api/websocket',
|
||||
BASE_URL: process.env.HASS_HOST || 'http://homeassistant.local:8123',
|
||||
SOCKET_TOKEN: process.env.HASS_TOKEN
|
||||
SOCKET_URL:
|
||||
process.env.HASS_SOCKET_URL ||
|
||||
"ws://homeassistant.local:8123/api/websocket",
|
||||
BASE_URL: process.env.HASS_HOST || "http://homeassistant.local:8123",
|
||||
SOCKET_TOKEN: process.env.HASS_TOKEN,
|
||||
};
|
||||
|
||||
// Server Configuration
|
||||
export const SERVER_CONFIG = {
|
||||
PORT: parseInt(process.env.PORT || '3000', 10),
|
||||
NODE_ENV: process.env.NODE_ENV || 'development',
|
||||
DEBUG: process.env.DEBUG === 'true',
|
||||
LOG_LEVEL: process.env.LOG_LEVEL || 'info'
|
||||
PORT: parseInt(process.env.PORT || "3000", 10),
|
||||
NODE_ENV: process.env.NODE_ENV || "development",
|
||||
DEBUG: process.env.DEBUG === "true",
|
||||
LOG_LEVEL: process.env.LOG_LEVEL || "info",
|
||||
};
|
||||
|
||||
// AI Configuration
|
||||
export const AI_CONFIG = {
|
||||
PROCESSOR_TYPE: process.env.PROCESSOR_TYPE || 'claude',
|
||||
OPENAI_API_KEY: process.env.OPENAI_API_KEY
|
||||
PROCESSOR_TYPE: process.env.PROCESSOR_TYPE || "claude",
|
||||
OPENAI_API_KEY: process.env.OPENAI_API_KEY,
|
||||
};
|
||||
|
||||
// Rate Limiting Configuration
|
||||
export const RATE_LIMIT_CONFIG = {
|
||||
REGULAR: parseInt(process.env.RATE_LIMIT_REGULAR || '100', 10),
|
||||
WEBSOCKET: parseInt(process.env.RATE_LIMIT_WEBSOCKET || '1000', 10)
|
||||
REGULAR: parseInt(process.env.RATE_LIMIT_REGULAR || "100", 10),
|
||||
WEBSOCKET: parseInt(process.env.RATE_LIMIT_WEBSOCKET || "1000", 10),
|
||||
};
|
||||
|
||||
// Security Configuration
|
||||
export const SECURITY_CONFIG = {
|
||||
JWT_SECRET: process.env.JWT_SECRET || 'default_secret_key_change_in_production',
|
||||
CORS_ORIGINS: (process.env.CORS_ORIGINS || 'http://localhost:3000,http://localhost:8123')
|
||||
.split(',')
|
||||
.map(origin => origin.trim())
|
||||
JWT_SECRET:
|
||||
process.env.JWT_SECRET || "default_secret_key_change_in_production",
|
||||
CORS_ORIGINS: (
|
||||
process.env.CORS_ORIGINS || "http://localhost:3000,http://localhost:8123"
|
||||
)
|
||||
.split(",")
|
||||
.map((origin) => origin.trim()),
|
||||
};
|
||||
|
||||
// Test Configuration
|
||||
export const TEST_CONFIG = {
|
||||
HASS_HOST: process.env.TEST_HASS_HOST || 'http://localhost:8123',
|
||||
HASS_TOKEN: process.env.TEST_HASS_TOKEN || 'test_token',
|
||||
HASS_SOCKET_URL: process.env.TEST_HASS_SOCKET_URL || 'ws://localhost:8123/api/websocket',
|
||||
PORT: parseInt(process.env.TEST_PORT || '3001', 10)
|
||||
HASS_HOST: process.env.TEST_HASS_HOST || "http://localhost:8123",
|
||||
HASS_TOKEN: process.env.TEST_HASS_TOKEN || "test_token",
|
||||
HASS_SOCKET_URL:
|
||||
process.env.TEST_HASS_SOCKET_URL || "ws://localhost:8123/api/websocket",
|
||||
PORT: parseInt(process.env.TEST_PORT || "3001", 10),
|
||||
};
|
||||
|
||||
// Mock Configuration (for testing)
|
||||
export const MOCK_CONFIG = {
|
||||
SERVICES: process.env.MOCK_SERVICES === 'true',
|
||||
RESPONSES_DIR: process.env.MOCK_RESPONSES_DIR || '__tests__/mock-responses'
|
||||
SERVICES: process.env.MOCK_SERVICES === "true",
|
||||
RESPONSES_DIR: process.env.MOCK_RESPONSES_DIR || "__tests__/mock-responses",
|
||||
};
|
||||
|
||||
// Validate required configuration
|
||||
function validateConfig() {
|
||||
const missingVars: string[] = [];
|
||||
|
||||
if (!HASS_CONFIG.TOKEN) missingVars.push('HASS_TOKEN');
|
||||
if (!SECURITY_CONFIG.JWT_SECRET) missingVars.push('JWT_SECRET');
|
||||
if (!HASS_CONFIG.TOKEN) missingVars.push("HASS_TOKEN");
|
||||
if (!SECURITY_CONFIG.JWT_SECRET) missingVars.push("JWT_SECRET");
|
||||
|
||||
if (missingVars.length > 0) {
|
||||
throw new Error(`Missing required environment variables: ${missingVars.join(', ')}`);
|
||||
throw new Error(
|
||||
`Missing required environment variables: ${missingVars.join(", ")}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,5 +94,5 @@ export const AppConfig = {
|
||||
RATE_LIMIT: RATE_LIMIT_CONFIG,
|
||||
SECURITY: SECURITY_CONFIG,
|
||||
TEST: TEST_CONFIG,
|
||||
MOCK: MOCK_CONFIG
|
||||
MOCK: MOCK_CONFIG,
|
||||
};
|
||||
129
src/config/security.config.ts
Normal file
129
src/config/security.config.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// Security configuration schema
|
||||
const securityConfigSchema = z.object({
|
||||
// JWT Configuration
|
||||
JWT_SECRET: z.string().min(32),
|
||||
JWT_EXPIRY: z.number().default(24 * 60 * 60 * 1000), // 24 hours
|
||||
JWT_MAX_AGE: z.number().default(30 * 24 * 60 * 60 * 1000), // 30 days
|
||||
JWT_ALGORITHM: z.enum(["HS256", "HS384", "HS512"]).default("HS256"),
|
||||
|
||||
// Rate Limiting
|
||||
RATE_LIMIT_WINDOW: z.number().default(15 * 60 * 1000), // 15 minutes
|
||||
RATE_LIMIT_MAX_REQUESTS: z.number().default(100),
|
||||
RATE_LIMIT_WEBSOCKET: z.number().default(1000),
|
||||
|
||||
// Token Security
|
||||
TOKEN_MIN_LENGTH: z.number().default(32),
|
||||
MAX_FAILED_ATTEMPTS: z.number().default(5),
|
||||
LOCKOUT_DURATION: z.number().default(15 * 60 * 1000), // 15 minutes
|
||||
|
||||
// CORS Configuration
|
||||
CORS_ORIGINS: z
|
||||
.array(z.string())
|
||||
.default(["http://localhost:3000", "http://localhost:8123"]),
|
||||
CORS_METHODS: z
|
||||
.array(z.string())
|
||||
.default(["GET", "POST", "PUT", "DELETE", "OPTIONS"]),
|
||||
CORS_ALLOWED_HEADERS: z
|
||||
.array(z.string())
|
||||
.default(["Content-Type", "Authorization", "X-Requested-With"]),
|
||||
CORS_EXPOSED_HEADERS: z.array(z.string()).default([]),
|
||||
CORS_CREDENTIALS: z.boolean().default(true),
|
||||
CORS_MAX_AGE: z.number().default(24 * 60 * 60), // 24 hours
|
||||
|
||||
// Content Security Policy
|
||||
CSP_ENABLED: z.boolean().default(true),
|
||||
CSP_REPORT_ONLY: z.boolean().default(false),
|
||||
CSP_REPORT_URI: z.string().optional(),
|
||||
|
||||
// SSL/TLS Configuration
|
||||
REQUIRE_HTTPS: z.boolean().default(process.env.NODE_ENV === "production"),
|
||||
HSTS_MAX_AGE: z.number().default(31536000), // 1 year
|
||||
HSTS_INCLUDE_SUBDOMAINS: z.boolean().default(true),
|
||||
HSTS_PRELOAD: z.boolean().default(true),
|
||||
|
||||
// Cookie Security
|
||||
COOKIE_SECRET: z.string().min(32).optional(),
|
||||
COOKIE_SECURE: z.boolean().default(process.env.NODE_ENV === "production"),
|
||||
COOKIE_HTTP_ONLY: z.boolean().default(true),
|
||||
COOKIE_SAME_SITE: z.enum(["Strict", "Lax", "None"]).default("Strict"),
|
||||
|
||||
// Request Limits
|
||||
MAX_REQUEST_SIZE: z.number().default(1024 * 1024), // 1MB
|
||||
MAX_REQUEST_FIELDS: z.number().default(1000),
|
||||
});
|
||||
|
||||
// Parse environment variables
|
||||
const parseEnvConfig = () => {
|
||||
const config = {
|
||||
JWT_SECRET:
|
||||
process.env.JWT_SECRET || "default_secret_key_change_in_production",
|
||||
JWT_EXPIRY: parseInt(process.env.JWT_EXPIRY || "86400000"),
|
||||
JWT_MAX_AGE: parseInt(process.env.JWT_MAX_AGE || "2592000000"),
|
||||
JWT_ALGORITHM: process.env.JWT_ALGORITHM || "HS256",
|
||||
|
||||
RATE_LIMIT_WINDOW: parseInt(process.env.RATE_LIMIT_WINDOW || "900000"),
|
||||
RATE_LIMIT_MAX_REQUESTS: parseInt(
|
||||
process.env.RATE_LIMIT_MAX_REQUESTS || "100",
|
||||
),
|
||||
RATE_LIMIT_WEBSOCKET: parseInt(process.env.RATE_LIMIT_WEBSOCKET || "1000"),
|
||||
|
||||
TOKEN_MIN_LENGTH: parseInt(process.env.TOKEN_MIN_LENGTH || "32"),
|
||||
MAX_FAILED_ATTEMPTS: parseInt(process.env.MAX_FAILED_ATTEMPTS || "5"),
|
||||
LOCKOUT_DURATION: parseInt(process.env.LOCKOUT_DURATION || "900000"),
|
||||
|
||||
CORS_ORIGINS: (
|
||||
process.env.CORS_ORIGINS || "http://localhost:3000,http://localhost:8123"
|
||||
)
|
||||
.split(",")
|
||||
.map((origin) => origin.trim()),
|
||||
CORS_METHODS: (process.env.CORS_METHODS || "GET,POST,PUT,DELETE,OPTIONS")
|
||||
.split(",")
|
||||
.map((method) => method.trim()),
|
||||
CORS_ALLOWED_HEADERS: (
|
||||
process.env.CORS_ALLOWED_HEADERS ||
|
||||
"Content-Type,Authorization,X-Requested-With"
|
||||
)
|
||||
.split(",")
|
||||
.map((header) => header.trim()),
|
||||
CORS_EXPOSED_HEADERS: (process.env.CORS_EXPOSED_HEADERS || "")
|
||||
.split(",")
|
||||
.filter(Boolean)
|
||||
.map((header) => header.trim()),
|
||||
CORS_CREDENTIALS: process.env.CORS_CREDENTIALS !== "false",
|
||||
CORS_MAX_AGE: parseInt(process.env.CORS_MAX_AGE || "86400"),
|
||||
|
||||
CSP_ENABLED: process.env.CSP_ENABLED !== "false",
|
||||
CSP_REPORT_ONLY: process.env.CSP_REPORT_ONLY === "true",
|
||||
CSP_REPORT_URI: process.env.CSP_REPORT_URI,
|
||||
|
||||
REQUIRE_HTTPS:
|
||||
process.env.REQUIRE_HTTPS !== "false" &&
|
||||
process.env.NODE_ENV === "production",
|
||||
HSTS_MAX_AGE: parseInt(process.env.HSTS_MAX_AGE || "31536000"),
|
||||
HSTS_INCLUDE_SUBDOMAINS: process.env.HSTS_INCLUDE_SUBDOMAINS !== "false",
|
||||
HSTS_PRELOAD: process.env.HSTS_PRELOAD !== "false",
|
||||
|
||||
COOKIE_SECRET: process.env.COOKIE_SECRET,
|
||||
COOKIE_SECURE:
|
||||
process.env.COOKIE_SECURE !== "false" &&
|
||||
process.env.NODE_ENV === "production",
|
||||
COOKIE_HTTP_ONLY: process.env.COOKIE_HTTP_ONLY !== "false",
|
||||
COOKIE_SAME_SITE: (process.env.COOKIE_SAME_SITE || "Strict") as
|
||||
| "Strict"
|
||||
| "Lax"
|
||||
| "None",
|
||||
|
||||
MAX_REQUEST_SIZE: parseInt(process.env.MAX_REQUEST_SIZE || "1048576"),
|
||||
MAX_REQUEST_FIELDS: parseInt(process.env.MAX_REQUEST_FIELDS || "1000"),
|
||||
};
|
||||
|
||||
return securityConfigSchema.parse(config);
|
||||
};
|
||||
|
||||
// Export the validated configuration
|
||||
export const SECURITY_CONFIG = parseEnvConfig();
|
||||
|
||||
// Export types
|
||||
export type SecurityConfig = z.infer<typeof securityConfigSchema>;
|
||||
@@ -1,14 +1,14 @@
|
||||
import { EventEmitter } from 'events';
|
||||
import { EventEmitter } from "events";
|
||||
|
||||
// Resource types
|
||||
export enum ResourceType {
|
||||
DEVICE = 'device',
|
||||
AREA = 'area',
|
||||
USER = 'user',
|
||||
AUTOMATION = 'automation',
|
||||
SCENE = 'scene',
|
||||
SCRIPT = 'script',
|
||||
GROUP = 'group'
|
||||
DEVICE = "device",
|
||||
AREA = "area",
|
||||
USER = "user",
|
||||
AUTOMATION = "automation",
|
||||
SCENE = "scene",
|
||||
SCRIPT = "script",
|
||||
GROUP = "group",
|
||||
}
|
||||
|
||||
// Resource state interface
|
||||
@@ -23,11 +23,11 @@ export interface ResourceState {
|
||||
|
||||
// Resource relationship types
|
||||
export enum RelationType {
|
||||
CONTAINS = 'contains',
|
||||
CONTROLS = 'controls',
|
||||
TRIGGERS = 'triggers',
|
||||
DEPENDS_ON = 'depends_on',
|
||||
GROUPS = 'groups'
|
||||
CONTAINS = "contains",
|
||||
CONTROLS = "controls",
|
||||
TRIGGERS = "triggers",
|
||||
DEPENDS_ON = "depends_on",
|
||||
GROUPS = "groups",
|
||||
}
|
||||
|
||||
// Resource relationship interface
|
||||
@@ -52,7 +52,7 @@ export class ContextManager extends EventEmitter {
|
||||
// Resource management
|
||||
public addResource(resource: ResourceState): void {
|
||||
this.resources.set(resource.id, resource);
|
||||
this.emit('resource_added', resource);
|
||||
this.emit("resource_added", resource);
|
||||
}
|
||||
|
||||
public updateResource(id: string, update: Partial<ResourceState>): void {
|
||||
@@ -65,10 +65,10 @@ export class ContextManager extends EventEmitter {
|
||||
const updatedResource = {
|
||||
...resource,
|
||||
...update,
|
||||
lastUpdated: Date.now()
|
||||
lastUpdated: Date.now(),
|
||||
};
|
||||
this.resources.set(id, updatedResource);
|
||||
this.emit('resource_updated', updatedResource);
|
||||
this.emit("resource_updated", updatedResource);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,25 +78,32 @@ export class ContextManager extends EventEmitter {
|
||||
this.resources.delete(id);
|
||||
// Remove related relationships
|
||||
this.relationships = this.relationships.filter(
|
||||
rel => rel.sourceId !== id && rel.targetId !== id
|
||||
(rel) => rel.sourceId !== id && rel.targetId !== id,
|
||||
);
|
||||
this.emit('resource_removed', resource);
|
||||
this.emit("resource_removed", resource);
|
||||
}
|
||||
}
|
||||
|
||||
// Relationship management
|
||||
public addRelationship(relationship: ResourceRelationship): void {
|
||||
this.relationships.push(relationship);
|
||||
this.emit('relationship_added', relationship);
|
||||
this.emit("relationship_added", relationship);
|
||||
}
|
||||
|
||||
public removeRelationship(sourceId: string, targetId: string, type: RelationType): void {
|
||||
public removeRelationship(
|
||||
sourceId: string,
|
||||
targetId: string,
|
||||
type: RelationType,
|
||||
): void {
|
||||
const index = this.relationships.findIndex(
|
||||
rel => rel.sourceId === sourceId && rel.targetId === targetId && rel.type === type
|
||||
(rel) =>
|
||||
rel.sourceId === sourceId &&
|
||||
rel.targetId === targetId &&
|
||||
rel.type === type,
|
||||
);
|
||||
if (index !== -1) {
|
||||
const removed = this.relationships.splice(index, 1)[0];
|
||||
this.emit('relationship_removed', removed);
|
||||
this.emit("relationship_removed", removed);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -121,14 +128,14 @@ export class ContextManager extends EventEmitter {
|
||||
|
||||
public getResourcesByType(type: ResourceType): ResourceState[] {
|
||||
return Array.from(this.resources.values()).filter(
|
||||
resource => resource.type === type
|
||||
(resource) => resource.type === type,
|
||||
);
|
||||
}
|
||||
|
||||
public getRelatedResources(
|
||||
id: string,
|
||||
type?: RelationType,
|
||||
depth: number = 1
|
||||
depth: number = 1,
|
||||
): ResourceState[] {
|
||||
const related = new Set<ResourceState>();
|
||||
const visited = new Set<string>();
|
||||
@@ -138,12 +145,14 @@ export class ContextManager extends EventEmitter {
|
||||
visited.add(currentId);
|
||||
|
||||
this.relationships
|
||||
.filter(rel =>
|
||||
.filter(
|
||||
(rel) =>
|
||||
(rel.sourceId === currentId || rel.targetId === currentId) &&
|
||||
(!type || rel.type === type)
|
||||
(!type || rel.type === type),
|
||||
)
|
||||
.forEach(rel => {
|
||||
const relatedId = rel.sourceId === currentId ? rel.targetId : rel.sourceId;
|
||||
.forEach((rel) => {
|
||||
const relatedId =
|
||||
rel.sourceId === currentId ? rel.targetId : rel.sourceId;
|
||||
const relatedResource = this.resources.get(relatedId);
|
||||
if (relatedResource) {
|
||||
related.add(relatedResource);
|
||||
@@ -168,25 +177,29 @@ export class ContextManager extends EventEmitter {
|
||||
};
|
||||
} {
|
||||
const dependencies = this.relationships
|
||||
.filter(rel => rel.sourceId === id && rel.type === RelationType.DEPENDS_ON)
|
||||
.map(rel => rel.targetId);
|
||||
.filter(
|
||||
(rel) => rel.sourceId === id && rel.type === RelationType.DEPENDS_ON,
|
||||
)
|
||||
.map((rel) => rel.targetId);
|
||||
|
||||
const dependents = this.relationships
|
||||
.filter(rel => rel.targetId === id && rel.type === RelationType.DEPENDS_ON)
|
||||
.map(rel => rel.sourceId);
|
||||
.filter(
|
||||
(rel) => rel.targetId === id && rel.type === RelationType.DEPENDS_ON,
|
||||
)
|
||||
.map((rel) => rel.sourceId);
|
||||
|
||||
const groups = this.relationships
|
||||
.filter(rel => rel.targetId === id && rel.type === RelationType.GROUPS)
|
||||
.map(rel => rel.sourceId);
|
||||
.filter((rel) => rel.targetId === id && rel.type === RelationType.GROUPS)
|
||||
.map((rel) => rel.sourceId);
|
||||
|
||||
const usage = {
|
||||
triggerCount: this.relationships.filter(
|
||||
rel => rel.sourceId === id && rel.type === RelationType.TRIGGERS
|
||||
(rel) => rel.sourceId === id && rel.type === RelationType.TRIGGERS,
|
||||
).length,
|
||||
controlCount: this.relationships.filter(
|
||||
rel => rel.sourceId === id && rel.type === RelationType.CONTROLS
|
||||
(rel) => rel.sourceId === id && rel.type === RelationType.CONTROLS,
|
||||
).length,
|
||||
groupCount: groups.length
|
||||
groupCount: groups.length,
|
||||
};
|
||||
|
||||
return { dependencies, dependents, groups, usage };
|
||||
@@ -195,7 +208,7 @@ export class ContextManager extends EventEmitter {
|
||||
// Event subscriptions
|
||||
public subscribeToResource(
|
||||
id: string,
|
||||
callback: (state: ResourceState) => void
|
||||
callback: (state: ResourceState) => void,
|
||||
): () => void {
|
||||
const handler = (resource: ResourceState) => {
|
||||
if (resource.id === id) {
|
||||
@@ -203,13 +216,13 @@ export class ContextManager extends EventEmitter {
|
||||
}
|
||||
};
|
||||
|
||||
this.on('resource_updated', handler);
|
||||
return () => this.off('resource_updated', handler);
|
||||
this.on("resource_updated", handler);
|
||||
return () => this.off("resource_updated", handler);
|
||||
}
|
||||
|
||||
public subscribeToType(
|
||||
type: ResourceType,
|
||||
callback: (state: ResourceState) => void
|
||||
callback: (state: ResourceState) => void,
|
||||
): () => void {
|
||||
const handler = (resource: ResourceState) => {
|
||||
if (resource.type === type) {
|
||||
@@ -217,8 +230,8 @@ export class ContextManager extends EventEmitter {
|
||||
}
|
||||
};
|
||||
|
||||
this.on('resource_updated', handler);
|
||||
return () => this.off('resource_updated', handler);
|
||||
this.on("resource_updated", handler);
|
||||
return () => this.off("resource_updated", handler);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,687 +1,125 @@
|
||||
import { CreateApplication, TServiceParams, ServiceFunction, AlsExtension, GetApisResult, ILogger, InternalDefinition, TContext, TInjectedConfig, TLifecycleBase, TScheduler } from "@digital-alchemy/core";
|
||||
import { Area, Backup, CallProxy, Configure, Device, EntityManager, EventsService, FetchAPI, FetchInternals, Floor, IDByExtension, Label, LIB_HASS, ReferenceService, Registry, WebsocketAPI, Zone } from "@digital-alchemy/hass";
|
||||
import { DomainSchema } from "../schemas.js";
|
||||
import { HASS_CONFIG } from "../config/index.js";
|
||||
import WebSocket from 'ws';
|
||||
import { EventEmitter } from 'events';
|
||||
import * as HomeAssistant from '../types/hass.js';
|
||||
import { HassEntity, HassEvent, HassService } from '../interfaces/hass.js';
|
||||
import type { HassEntity } from "../interfaces/hass.js";
|
||||
|
||||
type Environments = "development" | "production" | "test";
|
||||
class HomeAssistantAPI {
|
||||
private baseUrl: string;
|
||||
private token: string;
|
||||
|
||||
// Define the type for Home Assistant services
|
||||
type HassServiceMethod = (data: Record<string, unknown>) => Promise<void>;
|
||||
constructor() {
|
||||
this.baseUrl = process.env.HASS_HOST || "http://localhost:8123";
|
||||
this.token = process.env.HASS_TOKEN || "";
|
||||
|
||||
type HassServices = {
|
||||
[K in keyof typeof DomainSchema.Values]: {
|
||||
[service: string]: HassServiceMethod;
|
||||
};
|
||||
};
|
||||
|
||||
// Define the type for Home Assistant instance
|
||||
interface HassInstance {
|
||||
states: {
|
||||
get: () => Promise<HassEntity[]>;
|
||||
subscribe: (callback: (states: HassEntity[]) => void) => Promise<number>;
|
||||
unsubscribe: (subscription: number) => void;
|
||||
};
|
||||
services: {
|
||||
get: () => Promise<Record<string, Record<string, HassService>>>;
|
||||
call: (domain: string, service: string, serviceData?: Record<string, any>) => Promise<void>;
|
||||
};
|
||||
connection: {
|
||||
socket: WebSocket;
|
||||
subscribeEvents: (callback: (event: HassEvent) => void, eventType?: string) => Promise<number>;
|
||||
unsubscribeEvents: (subscription: number) => void;
|
||||
};
|
||||
subscribeEvents: (callback: (event: HassEvent) => void, eventType?: string) => Promise<number>;
|
||||
unsubscribeEvents: (subscription: number) => void;
|
||||
if (!this.token || this.token === "your_hass_token_here") {
|
||||
throw new Error("HASS_TOKEN is required but not set in environment variables");
|
||||
}
|
||||
|
||||
// Configuration type for application with more specific constraints
|
||||
type ApplicationConfiguration = {
|
||||
NODE_ENV: ServiceFunction<Environments>;
|
||||
};
|
||||
console.log(`Initializing Home Assistant API with base URL: ${this.baseUrl}`);
|
||||
}
|
||||
|
||||
// Strict configuration type for Home Assistant
|
||||
type HassConfiguration = {
|
||||
BASE_URL: {
|
||||
type: "string";
|
||||
description: string;
|
||||
required: true;
|
||||
default: string;
|
||||
};
|
||||
TOKEN: {
|
||||
type: "string";
|
||||
description: string;
|
||||
required: true;
|
||||
default: string;
|
||||
};
|
||||
SOCKET_URL: {
|
||||
type: "string";
|
||||
description: string;
|
||||
required: true;
|
||||
default: string;
|
||||
};
|
||||
SOCKET_TOKEN: {
|
||||
type: "string";
|
||||
description: string;
|
||||
required: true;
|
||||
default: string;
|
||||
};
|
||||
};
|
||||
|
||||
// application
|
||||
const MY_APP = CreateApplication<ApplicationConfiguration, {}>({
|
||||
configuration: {
|
||||
NODE_ENV: {
|
||||
type: "string",
|
||||
default: "development",
|
||||
enum: ["development", "production", "test"],
|
||||
description: "Code runner addon can set with it's own NODE_ENV",
|
||||
private async fetchApi(endpoint: string, options: RequestInit = {}) {
|
||||
const url = `${this.baseUrl}/api/${endpoint}`;
|
||||
console.log(`Making request to: ${url}`);
|
||||
console.log('Request options:', {
|
||||
method: options.method || 'GET',
|
||||
headers: {
|
||||
Authorization: 'Bearer [REDACTED]',
|
||||
"Content-Type": "application/json",
|
||||
...options.headers,
|
||||
},
|
||||
},
|
||||
services: {
|
||||
NODE_ENV: () => {
|
||||
// Directly return the default value or use process.env
|
||||
return (process.env.NODE_ENV as Environments) || "development";
|
||||
}
|
||||
},
|
||||
libraries: [
|
||||
{
|
||||
...LIB_HASS,
|
||||
configuration: {
|
||||
BASE_URL: {
|
||||
type: "string",
|
||||
description: "Home Assistant base URL",
|
||||
required: true,
|
||||
default: HASS_CONFIG.BASE_URL
|
||||
},
|
||||
TOKEN: {
|
||||
type: "string",
|
||||
description: "Home Assistant long-lived access token",
|
||||
required: true,
|
||||
default: HASS_CONFIG.TOKEN
|
||||
},
|
||||
SOCKET_URL: {
|
||||
type: "string",
|
||||
description: "Home Assistant WebSocket URL",
|
||||
required: true,
|
||||
default: HASS_CONFIG.SOCKET_URL
|
||||
},
|
||||
SOCKET_TOKEN: {
|
||||
type: "string",
|
||||
description: "Home Assistant WebSocket token",
|
||||
required: true,
|
||||
default: HASS_CONFIG.SOCKET_TOKEN
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
name: 'hass' as const
|
||||
body: options.body ? JSON.parse(options.body as string) : undefined
|
||||
});
|
||||
|
||||
export interface HassConfig {
|
||||
host: string;
|
||||
token: string;
|
||||
}
|
||||
|
||||
const CONFIG: Record<string, HassConfig> = {
|
||||
development: {
|
||||
host: process.env.HASS_HOST || 'http://localhost:8123',
|
||||
token: process.env.HASS_TOKEN || ''
|
||||
},
|
||||
production: {
|
||||
host: process.env.HASS_HOST || '',
|
||||
token: process.env.HASS_TOKEN || ''
|
||||
},
|
||||
test: {
|
||||
host: 'http://localhost:8123',
|
||||
token: 'test_token'
|
||||
}
|
||||
};
|
||||
|
||||
export class HassWebSocketClient extends EventEmitter {
|
||||
private ws: WebSocket | null = null;
|
||||
private messageId = 1;
|
||||
private subscriptions = new Map<number, (data: any) => void>();
|
||||
private reconnectAttempts = 0;
|
||||
private options: {
|
||||
autoReconnect: boolean;
|
||||
maxReconnectAttempts: number;
|
||||
reconnectDelay: number;
|
||||
};
|
||||
|
||||
constructor(
|
||||
private url: string,
|
||||
private token: string,
|
||||
options: Partial<typeof HassWebSocketClient.prototype.options> = {}
|
||||
) {
|
||||
super();
|
||||
this.options = {
|
||||
autoReconnect: true,
|
||||
maxReconnectAttempts: 3,
|
||||
reconnectDelay: 1000,
|
||||
...options
|
||||
};
|
||||
}
|
||||
|
||||
async connect(): Promise<void> {
|
||||
if (this.ws && this.ws.readyState === WebSocket.OPEN) {
|
||||
return;
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
this.ws = new WebSocket(this.url);
|
||||
|
||||
this.ws.on('open', () => {
|
||||
this.emit('open');
|
||||
const authMessage: HomeAssistant.AuthMessage = {
|
||||
type: 'auth',
|
||||
access_token: this.token
|
||||
};
|
||||
this.ws?.send(JSON.stringify(authMessage));
|
||||
});
|
||||
|
||||
this.ws.on('message', (data: string) => {
|
||||
try {
|
||||
const message = JSON.parse(data);
|
||||
this.handleMessage(message);
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
"Content-Type": "application/json",
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorText = await response.text();
|
||||
console.error('Home Assistant API error:', {
|
||||
status: response.status,
|
||||
statusText: response.statusText,
|
||||
error: errorText
|
||||
});
|
||||
throw new Error(`Home Assistant API error: ${response.status} ${response.statusText} - ${errorText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
console.log('Response data:', data);
|
||||
return data;
|
||||
} catch (error) {
|
||||
this.emit('error', new Error('Failed to parse message'));
|
||||
}
|
||||
});
|
||||
|
||||
this.ws.on('close', () => {
|
||||
this.emit('disconnected');
|
||||
if (this.options.autoReconnect && this.reconnectAttempts < this.options.maxReconnectAttempts) {
|
||||
setTimeout(() => {
|
||||
this.reconnectAttempts++;
|
||||
this.connect();
|
||||
}, this.options.reconnectDelay);
|
||||
}
|
||||
});
|
||||
|
||||
this.ws.on('error', (error) => {
|
||||
this.emit('error', error);
|
||||
reject(error);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
private handleMessage(message: any): void {
|
||||
switch (message.type) {
|
||||
case 'auth_ok':
|
||||
this.emit('auth_ok');
|
||||
break;
|
||||
case 'auth_invalid':
|
||||
this.emit('auth_invalid');
|
||||
break;
|
||||
case 'result':
|
||||
// Handle command results
|
||||
break;
|
||||
case 'event':
|
||||
if (message.event) {
|
||||
this.emit('event', message.event);
|
||||
const subscription = this.subscriptions.get(message.id);
|
||||
if (subscription) {
|
||||
subscription(message.event.data);
|
||||
}
|
||||
}
|
||||
break;
|
||||
default:
|
||||
this.emit('error', new Error(`Unknown message type: ${message.type}`));
|
||||
console.error('Failed to make request:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async subscribeEvents(callback: (data: any) => void, eventType?: string): Promise<number> {
|
||||
const id = this.messageId++;
|
||||
const message = {
|
||||
id,
|
||||
type: 'subscribe_events',
|
||||
event_type: eventType
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
|
||||
reject(new Error('WebSocket not connected'));
|
||||
return;
|
||||
async getStates(): Promise<HassEntity[]> {
|
||||
return this.fetchApi("states");
|
||||
}
|
||||
|
||||
this.subscriptions.set(id, callback);
|
||||
this.ws.send(JSON.stringify(message));
|
||||
resolve(id);
|
||||
});
|
||||
}
|
||||
|
||||
async unsubscribeEvents(subscriptionId: number): Promise<void> {
|
||||
const message = {
|
||||
id: this.messageId++,
|
||||
type: 'unsubscribe_events',
|
||||
subscription: subscriptionId
|
||||
};
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
if (!this.ws || this.ws.readyState !== WebSocket.OPEN) {
|
||||
reject(new Error('WebSocket not connected'));
|
||||
return;
|
||||
}
|
||||
|
||||
this.ws.send(JSON.stringify(message));
|
||||
this.subscriptions.delete(subscriptionId);
|
||||
resolve();
|
||||
});
|
||||
}
|
||||
|
||||
disconnect(): void {
|
||||
if (this.ws) {
|
||||
this.ws.close();
|
||||
this.ws = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class HassInstanceImpl implements HassInstance {
|
||||
public readonly baseUrl: string;
|
||||
public readonly token: string;
|
||||
public wsClient: HassWebSocketClient | undefined;
|
||||
|
||||
public readonly services: HassInstance['services'];
|
||||
public readonly states: HassInstance['states'];
|
||||
public readonly connection: HassInstance['connection'];
|
||||
|
||||
constructor(baseUrl: string, token: string) {
|
||||
this.baseUrl = baseUrl;
|
||||
this.token = token;
|
||||
|
||||
// Initialize services
|
||||
this.services = {
|
||||
get: async () => {
|
||||
const response = await fetch(`${this.baseUrl}/api/services`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch services: ${response.statusText}`);
|
||||
}
|
||||
return response.json();
|
||||
},
|
||||
call: async (domain: string, service: string, serviceData?: Record<string, any>) => {
|
||||
const response = await fetch(`${this.baseUrl}/api/services/${domain}/${service}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(serviceData),
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(`Service call failed: ${response.statusText}`);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Initialize states
|
||||
this.states = {
|
||||
get: async () => {
|
||||
const response = await fetch(`${this.baseUrl}/api/states`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch states: ${response.statusText}`);
|
||||
}
|
||||
return response.json();
|
||||
},
|
||||
subscribe: async (callback: (states: HassEntity[]) => void) => {
|
||||
return this.subscribeEvents((event: HassEvent) => {
|
||||
if (event.event_type === 'state_changed') {
|
||||
this.states.get().then(callback);
|
||||
}
|
||||
}, 'state_changed');
|
||||
},
|
||||
unsubscribe: (subscription: number) => {
|
||||
this.unsubscribeEvents(subscription);
|
||||
}
|
||||
};
|
||||
|
||||
// Initialize connection
|
||||
this.connection = {
|
||||
socket: new WebSocket(this.baseUrl.replace(/^http/, 'ws') + '/api/websocket'),
|
||||
subscribeEvents: this.subscribeEvents.bind(this),
|
||||
unsubscribeEvents: this.unsubscribeEvents.bind(this)
|
||||
};
|
||||
|
||||
this.initialize();
|
||||
}
|
||||
|
||||
public als!: AlsExtension;
|
||||
public context!: TContext;
|
||||
public event!: EventEmitter<[never]>;
|
||||
public internal!: InternalDefinition;
|
||||
public lifecycle!: TLifecycleBase;
|
||||
public logger!: ILogger;
|
||||
public scheduler!: TScheduler;
|
||||
public config!: TInjectedConfig;
|
||||
public params!: TServiceParams;
|
||||
public hass!: GetApisResult<{
|
||||
area: typeof Area;
|
||||
backup: typeof Backup;
|
||||
call: typeof CallProxy;
|
||||
configure: typeof Configure;
|
||||
device: typeof Device;
|
||||
entity: typeof EntityManager;
|
||||
events: typeof EventsService;
|
||||
fetch: typeof FetchAPI;
|
||||
floor: typeof Floor;
|
||||
idBy: typeof IDByExtension;
|
||||
internals: typeof FetchInternals;
|
||||
label: typeof Label;
|
||||
refBy: typeof ReferenceService;
|
||||
registry: typeof Registry;
|
||||
socket: typeof WebsocketAPI;
|
||||
zone: typeof Zone;
|
||||
}>;
|
||||
|
||||
private initialize() {
|
||||
// Initialize all required properties with proper type instantiation
|
||||
this.als = {} as AlsExtension;
|
||||
this.context = {} as TContext;
|
||||
this.event = new EventEmitter();
|
||||
this.internal = {} as InternalDefinition;
|
||||
this.lifecycle = {} as TLifecycleBase;
|
||||
this.logger = {} as ILogger;
|
||||
this.scheduler = {} as TScheduler;
|
||||
this.config = {} as TInjectedConfig;
|
||||
this.params = {} as TServiceParams;
|
||||
this.hass = {} as GetApisResult<any>;
|
||||
}
|
||||
|
||||
async fetchStates(): Promise<HomeAssistant.Entity[]> {
|
||||
const response = await fetch(`${this.baseUrl}/api/states`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch states: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data as HomeAssistant.Entity[];
|
||||
}
|
||||
|
||||
async fetchState(entityId: string): Promise<HomeAssistant.Entity> {
|
||||
const response = await fetch(`${this.baseUrl}/api/states/${entityId}`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch state: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
return data as HomeAssistant.Entity;
|
||||
async getState(entityId: string): Promise<HassEntity> {
|
||||
return this.fetchApi(`states/${entityId}`);
|
||||
}
|
||||
|
||||
async callService(domain: string, service: string, data: Record<string, any>): Promise<void> {
|
||||
const response = await fetch(`${this.baseUrl}/api/services/${domain}/${service}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Authorization: `Bearer ${this.token}`,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
await this.fetchApi(`services/${domain}/${service}`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Service call failed: ${response.statusText}`);
|
||||
}
|
||||
}
|
||||
|
||||
async subscribeEvents(callback: (event: HassEvent) => void, eventType?: string): Promise<number> {
|
||||
if (!this.wsClient) {
|
||||
this.wsClient = new HassWebSocketClient(
|
||||
this.baseUrl.replace(/^http/, 'ws') + '/api/websocket',
|
||||
this.token
|
||||
);
|
||||
await this.wsClient.connect();
|
||||
}
|
||||
let instance: HomeAssistantAPI | null = null;
|
||||
|
||||
return this.wsClient.subscribeEvents((data: any) => {
|
||||
const hassEvent: HassEvent = {
|
||||
event_type: data.event_type,
|
||||
data: data.data,
|
||||
origin: data.origin,
|
||||
time_fired: data.time_fired,
|
||||
context: {
|
||||
id: data.context.id,
|
||||
parent_id: data.context.parent_id,
|
||||
user_id: data.context.user_id
|
||||
}
|
||||
};
|
||||
callback(hassEvent);
|
||||
}, eventType);
|
||||
}
|
||||
|
||||
async unsubscribeEvents(subscriptionId: number): Promise<void> {
|
||||
if (this.wsClient) {
|
||||
await this.wsClient.unsubscribeEvents(subscriptionId);
|
||||
export async function get_hass() {
|
||||
if (!instance) {
|
||||
try {
|
||||
instance = new HomeAssistantAPI();
|
||||
// Verify connection by trying to get states
|
||||
await instance.getStates();
|
||||
console.log('Successfully connected to Home Assistant');
|
||||
} catch (error) {
|
||||
console.error('Failed to initialize Home Assistant connection:', error);
|
||||
instance = null;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
return instance;
|
||||
}
|
||||
|
||||
class HomeAssistantInstance implements HassInstance {
|
||||
private messageId = 1;
|
||||
private messageCallbacks = new Map<number, (result: any) => void>();
|
||||
private eventCallbacks = new Map<number, (event: HassEvent) => void>();
|
||||
private stateCallbacks = new Map<number, (states: HassEntity[]) => void>();
|
||||
private _authenticated = false;
|
||||
private socket: WebSocket;
|
||||
private readonly _states: HassInstance['states'];
|
||||
private readonly _services: HassInstance['services'];
|
||||
private readonly _connection: HassInstance['connection'];
|
||||
|
||||
constructor() {
|
||||
if (!HASS_CONFIG.TOKEN) {
|
||||
throw new Error('Home Assistant token is required');
|
||||
// Helper function to call Home Assistant services
|
||||
export async function call_service(
|
||||
domain: string,
|
||||
service: string,
|
||||
data: Record<string, any>,
|
||||
) {
|
||||
const hass = await get_hass();
|
||||
return hass.callService(domain, service, data);
|
||||
}
|
||||
|
||||
this.socket = new WebSocket(HASS_CONFIG.SOCKET_URL);
|
||||
|
||||
this._states = {
|
||||
get: async (): Promise<HassEntity[]> => {
|
||||
const message = {
|
||||
type: 'get_states'
|
||||
};
|
||||
return this.sendMessage(message);
|
||||
},
|
||||
|
||||
subscribe: async (callback: (states: HassEntity[]) => void): Promise<number> => {
|
||||
const id = this.messageId++;
|
||||
this.stateCallbacks.set(id, callback);
|
||||
|
||||
const message = {
|
||||
type: 'subscribe_events',
|
||||
event_type: 'state_changed'
|
||||
};
|
||||
|
||||
await this.sendMessage(message);
|
||||
return id;
|
||||
},
|
||||
|
||||
unsubscribe: (subscription: number): void => {
|
||||
this.stateCallbacks.delete(subscription);
|
||||
}
|
||||
};
|
||||
|
||||
this._services = {
|
||||
get: async (): Promise<Record<string, Record<string, HassService>>> => {
|
||||
const message = {
|
||||
type: 'get_services'
|
||||
};
|
||||
return this.sendMessage(message);
|
||||
},
|
||||
|
||||
call: async (domain: string, service: string, serviceData?: Record<string, any>): Promise<void> => {
|
||||
const message = {
|
||||
type: 'call_service',
|
||||
domain,
|
||||
service,
|
||||
service_data: serviceData
|
||||
};
|
||||
await this.sendMessage(message);
|
||||
}
|
||||
};
|
||||
|
||||
this._connection = {
|
||||
socket: this.socket,
|
||||
subscribeEvents: this.subscribeEvents.bind(this),
|
||||
unsubscribeEvents: this.unsubscribeEvents.bind(this)
|
||||
};
|
||||
|
||||
this.setupWebSocket();
|
||||
// Helper function to list devices
|
||||
export async function list_devices() {
|
||||
const hass = await get_hass();
|
||||
const states = await hass.getStates();
|
||||
return states.map((state: HassEntity) => ({
|
||||
entity_id: state.entity_id,
|
||||
state: state.state,
|
||||
attributes: state.attributes
|
||||
}));
|
||||
}
|
||||
|
||||
get authenticated(): boolean {
|
||||
return this._authenticated;
|
||||
// Helper function to get entity states
|
||||
export async function get_states() {
|
||||
const hass = await get_hass();
|
||||
return hass.getStates();
|
||||
}
|
||||
|
||||
get states(): HassInstance['states'] {
|
||||
return this._states;
|
||||
}
|
||||
|
||||
get services(): HassInstance['services'] {
|
||||
return this._services;
|
||||
}
|
||||
|
||||
get connection(): HassInstance['connection'] {
|
||||
return this._connection;
|
||||
}
|
||||
|
||||
private setupWebSocket() {
|
||||
this.socket.on('open', () => {
|
||||
this.authenticate();
|
||||
});
|
||||
|
||||
this.socket.on('message', (data: WebSocket.Data) => {
|
||||
if (typeof data === 'string') {
|
||||
const message = JSON.parse(data);
|
||||
this.handleMessage(message);
|
||||
}
|
||||
});
|
||||
|
||||
this.socket.on('close', () => {
|
||||
console.log('WebSocket connection closed');
|
||||
// Implement reconnection logic here
|
||||
});
|
||||
|
||||
this.socket.on('error', (error) => {
|
||||
console.error('WebSocket error:', error);
|
||||
});
|
||||
}
|
||||
|
||||
private authenticate() {
|
||||
const auth = {
|
||||
type: 'auth',
|
||||
access_token: HASS_CONFIG.TOKEN
|
||||
};
|
||||
this.socket.send(JSON.stringify(auth));
|
||||
}
|
||||
|
||||
private handleMessage(message: any) {
|
||||
if (message.type === 'auth_ok') {
|
||||
this._authenticated = true;
|
||||
console.log('Authenticated with Home Assistant');
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'auth_invalid') {
|
||||
console.error('Authentication failed:', message.message);
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'event') {
|
||||
const callback = this.eventCallbacks.get(message.id);
|
||||
if (callback) {
|
||||
callback(message.event);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === 'result') {
|
||||
const callback = this.messageCallbacks.get(message.id);
|
||||
if (callback) {
|
||||
callback(message.result);
|
||||
this.messageCallbacks.delete(message.id);
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private async sendMessage(message: any): Promise<any> {
|
||||
if (!this._authenticated) {
|
||||
throw new Error('Not authenticated with Home Assistant');
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const id = this.messageId++;
|
||||
message.id = id;
|
||||
|
||||
this.messageCallbacks.set(id, resolve);
|
||||
this.socket.send(JSON.stringify(message));
|
||||
|
||||
// Add timeout
|
||||
setTimeout(() => {
|
||||
this.messageCallbacks.delete(id);
|
||||
reject(new Error('Message timeout'));
|
||||
}, 10000);
|
||||
});
|
||||
}
|
||||
|
||||
public async subscribeEvents(callback: (event: HassEvent) => void, eventType?: string): Promise<number> {
|
||||
const id = this.messageId++;
|
||||
this.eventCallbacks.set(id, callback);
|
||||
|
||||
const message = {
|
||||
type: 'subscribe_events',
|
||||
event_type: eventType
|
||||
};
|
||||
|
||||
await this.sendMessage(message);
|
||||
return id;
|
||||
}
|
||||
|
||||
public unsubscribeEvents(subscription: number): void {
|
||||
this.eventCallbacks.delete(subscription);
|
||||
}
|
||||
}
|
||||
|
||||
let hassInstance: HomeAssistantInstance | null = null;
|
||||
|
||||
export async function get_hass(): Promise<HassInstance> {
|
||||
if (!hassInstance) {
|
||||
hassInstance = new HomeAssistantInstance();
|
||||
// Wait for authentication
|
||||
await new Promise<void>((resolve) => {
|
||||
const checkAuth = () => {
|
||||
if (hassInstance?.authenticated) {
|
||||
resolve();
|
||||
} else {
|
||||
setTimeout(checkAuth, 100);
|
||||
}
|
||||
};
|
||||
checkAuth();
|
||||
});
|
||||
}
|
||||
return hassInstance;
|
||||
// Helper function to get a specific entity state
|
||||
export async function get_state(entity_id: string) {
|
||||
const hass = await get_hass();
|
||||
return hass.getState(entity_id);
|
||||
}
|
||||
@@ -1,14 +1,14 @@
|
||||
const check = async () => {
|
||||
try {
|
||||
const response = await fetch('http://localhost:3000/health');
|
||||
const response = await fetch("http://localhost:3000/health");
|
||||
if (!response.ok) {
|
||||
console.error('Health check failed:', response.status);
|
||||
console.error("Health check failed:", response.status);
|
||||
process.exit(1);
|
||||
}
|
||||
console.log('Health check passed');
|
||||
console.log("Health check passed");
|
||||
process.exit(0);
|
||||
} catch (error) {
|
||||
console.error('Health check failed:', error);
|
||||
console.error("Health check failed:", error);
|
||||
process.exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
230
src/index.ts
230
src/index.ts
@@ -1,73 +1,169 @@
|
||||
/**
|
||||
* Home Assistant MCP (Master Control Program)
|
||||
* Main application entry point
|
||||
*
|
||||
* This file initializes the Express server and sets up necessary
|
||||
* middleware and routes for the application when not in Claude mode.
|
||||
*
|
||||
* @module index
|
||||
*/
|
||||
import "./polyfills.js";
|
||||
import { config } from "dotenv";
|
||||
import { resolve } from "path";
|
||||
import { Elysia } from "elysia";
|
||||
import { cors } from "@elysiajs/cors";
|
||||
import { swagger } from "@elysiajs/swagger";
|
||||
import {
|
||||
rateLimiter,
|
||||
securityHeaders,
|
||||
validateRequest,
|
||||
sanitizeInput,
|
||||
errorHandler,
|
||||
} from "./security/index.js";
|
||||
import {
|
||||
get_hass,
|
||||
call_service,
|
||||
list_devices,
|
||||
get_states,
|
||||
get_state,
|
||||
} from "./hass/index.js";
|
||||
import { z } from "zod";
|
||||
import {
|
||||
commonCommands,
|
||||
coverCommands,
|
||||
climateCommands,
|
||||
type Command,
|
||||
} from "./commands.js";
|
||||
import { speechService } from "./speech/index.js";
|
||||
import { APP_CONFIG } from "./config/app.config.js";
|
||||
|
||||
import express from 'express';
|
||||
import { APP_CONFIG } from './config/app.config.js';
|
||||
import { apiRoutes } from './routes/index.js';
|
||||
import { securityHeaders, rateLimiter, validateRequest, sanitizeInput, errorHandler } from './security/index.js';
|
||||
import { requestLogger, errorLogger } from './middleware/logging.middleware.js';
|
||||
import { get_hass } from './hass/index.js';
|
||||
import { LiteMCP } from 'litemcp';
|
||||
import { logger } from './utils/logger.js';
|
||||
import { initLogRotation } from './utils/log-rotation.js';
|
||||
// Load environment variables based on NODE_ENV
|
||||
const envFile =
|
||||
process.env.NODE_ENV === "production"
|
||||
? ".env"
|
||||
: process.env.NODE_ENV === "test"
|
||||
? ".env.test"
|
||||
: ".env.development";
|
||||
|
||||
logger.info('Starting Home Assistant MCP...');
|
||||
logger.info('Initializing Home Assistant connection...');
|
||||
console.log(`Loading environment from ${envFile}`);
|
||||
config({ path: resolve(process.cwd(), envFile) });
|
||||
|
||||
// Initialize log rotation
|
||||
initLogRotation();
|
||||
// Configuration
|
||||
const HASS_TOKEN = process.env.HASS_TOKEN;
|
||||
const PORT = parseInt(process.env.PORT || "4000", 10);
|
||||
|
||||
/**
|
||||
* Initialize LiteMCP instance
|
||||
* This provides the core MCP functionality
|
||||
*/
|
||||
const server = new LiteMCP('home-assistant', APP_CONFIG.VERSION);
|
||||
console.log("Initializing Home Assistant connection...");
|
||||
|
||||
// Only start Express server when not in Claude mode
|
||||
if (process.env.PROCESSOR_TYPE !== 'claude') {
|
||||
/**
|
||||
* Initialize Express application with security middleware
|
||||
* and route handlers
|
||||
*/
|
||||
const app = express();
|
||||
|
||||
// Apply logging middleware first to catch all requests
|
||||
app.use(requestLogger);
|
||||
|
||||
// Apply security middleware
|
||||
app.use(securityHeaders);
|
||||
app.use(rateLimiter);
|
||||
app.use(express.json());
|
||||
app.use(validateRequest);
|
||||
app.use(sanitizeInput);
|
||||
|
||||
/**
|
||||
* Mount API routes under /api
|
||||
* All API endpoints are prefixed with /api
|
||||
*/
|
||||
app.use('/api', apiRoutes);
|
||||
|
||||
/**
|
||||
* Apply error handling middleware
|
||||
* This should be the last middleware in the chain
|
||||
*/
|
||||
app.use(errorLogger);
|
||||
app.use(errorHandler);
|
||||
|
||||
/**
|
||||
* Start the server and listen for incoming connections
|
||||
* The port is configured in the environment variables
|
||||
*/
|
||||
app.listen(APP_CONFIG.PORT, () => {
|
||||
logger.info(`Server is running on port ${APP_CONFIG.PORT}`);
|
||||
});
|
||||
} else {
|
||||
logger.info('Running in Claude mode - Express server disabled');
|
||||
// Define Tool interface
|
||||
interface Tool {
|
||||
name: string;
|
||||
description: string;
|
||||
parameters: z.ZodType<any>;
|
||||
execute: (params: any) => Promise<any>;
|
||||
}
|
||||
|
||||
// Array to store tools
|
||||
const tools: Tool[] = [];
|
||||
|
||||
// Define the list devices tool
|
||||
const listDevicesTool: Tool = {
|
||||
name: "list_devices",
|
||||
description: "List all available Home Assistant devices",
|
||||
parameters: z.object({}),
|
||||
execute: async () => {
|
||||
try {
|
||||
const devices = await list_devices();
|
||||
return {
|
||||
success: true,
|
||||
devices,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Add tools to the array
|
||||
tools.push(listDevicesTool);
|
||||
|
||||
// Add the Home Assistant control tool
|
||||
const controlTool: Tool = {
|
||||
name: "control",
|
||||
description: "Control Home Assistant devices and services",
|
||||
parameters: z.object({
|
||||
command: z.enum([
|
||||
...commonCommands,
|
||||
...coverCommands,
|
||||
...climateCommands,
|
||||
] as [string, ...string[]]),
|
||||
entity_id: z.string().describe("The ID of the entity to control"),
|
||||
}),
|
||||
execute: async (params: { command: Command; entity_id: string }) => {
|
||||
try {
|
||||
const [domain] = params.entity_id.split(".");
|
||||
await call_service(domain, params.command, {
|
||||
entity_id: params.entity_id,
|
||||
});
|
||||
return {
|
||||
success: true,
|
||||
message: `Command ${params.command} executed successfully on ${params.entity_id}`,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Add the control tool to the array
|
||||
tools.push(controlTool);
|
||||
|
||||
// Initialize Elysia app with middleware
|
||||
const app = new Elysia()
|
||||
.use(cors())
|
||||
.use(swagger())
|
||||
.use(rateLimiter)
|
||||
.use(securityHeaders)
|
||||
.use(validateRequest)
|
||||
.use(sanitizeInput)
|
||||
.use(errorHandler);
|
||||
|
||||
// Health check endpoint
|
||||
app.get("/health", () => ({
|
||||
status: "ok",
|
||||
timestamp: new Date().toISOString(),
|
||||
version: "0.1.0",
|
||||
speech_enabled: APP_CONFIG.SPEECH.ENABLED,
|
||||
wake_word_enabled: APP_CONFIG.SPEECH.WAKE_WORD_ENABLED,
|
||||
speech_to_text_enabled: APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED,
|
||||
}));
|
||||
|
||||
// Initialize speech service if enabled
|
||||
if (APP_CONFIG.SPEECH.ENABLED) {
|
||||
console.log("Initializing speech service...");
|
||||
speechService.initialize().catch((error) => {
|
||||
console.error("Failed to initialize speech service:", error);
|
||||
});
|
||||
}
|
||||
|
||||
// Create API endpoints for each tool
|
||||
tools.forEach((tool) => {
|
||||
app.post(`/api/tools/${tool.name}`, async ({ body }: { body: Record<string, unknown> }) => {
|
||||
const result = await tool.execute(body);
|
||||
return result;
|
||||
});
|
||||
});
|
||||
|
||||
// Start the server
|
||||
app.listen(PORT, () => {
|
||||
console.log(`Server is running on port ${PORT}`);
|
||||
});
|
||||
|
||||
// Handle server shutdown
|
||||
process.on("SIGTERM", async () => {
|
||||
console.log("Received SIGTERM. Shutting down gracefully...");
|
||||
if (APP_CONFIG.SPEECH.ENABLED) {
|
||||
await speechService.shutdown().catch((error) => {
|
||||
console.error("Error shutting down speech service:", error);
|
||||
});
|
||||
}
|
||||
process.exit(0);
|
||||
});
|
||||
|
||||
@@ -29,7 +29,10 @@ export interface HassInstance {
|
||||
states: HassStates;
|
||||
services: HassServices;
|
||||
connection: HassConnection;
|
||||
subscribeEvents: (callback: (event: HassEvent) => void, eventType?: string) => Promise<number>;
|
||||
subscribeEvents: (
|
||||
callback: (event: HassEvent) => void,
|
||||
eventType?: string,
|
||||
) => Promise<number>;
|
||||
unsubscribeEvents: (subscription: number) => void;
|
||||
}
|
||||
|
||||
@@ -41,12 +44,19 @@ export interface HassStates {
|
||||
|
||||
export interface HassServices {
|
||||
get: () => Promise<Record<string, Record<string, HassService>>>;
|
||||
call: (domain: string, service: string, serviceData?: Record<string, any>) => Promise<void>;
|
||||
call: (
|
||||
domain: string,
|
||||
service: string,
|
||||
serviceData?: Record<string, any>,
|
||||
) => Promise<void>;
|
||||
}
|
||||
|
||||
export interface HassConnection {
|
||||
socket: WebSocket;
|
||||
subscribeEvents: (callback: (event: HassEvent) => void, eventType?: string) => Promise<number>;
|
||||
subscribeEvents: (
|
||||
callback: (event: HassEvent) => void,
|
||||
eventType?: string,
|
||||
) => Promise<number>;
|
||||
unsubscribeEvents: (subscription: number) => void;
|
||||
}
|
||||
|
||||
@@ -58,13 +68,16 @@ export interface HassService {
|
||||
domain: string[];
|
||||
};
|
||||
};
|
||||
fields: Record<string, {
|
||||
fields: Record<
|
||||
string,
|
||||
{
|
||||
name: string;
|
||||
description: string;
|
||||
required?: boolean;
|
||||
example?: any;
|
||||
selector?: any;
|
||||
}>;
|
||||
}
|
||||
>;
|
||||
}
|
||||
|
||||
export interface HassEvent {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { z } from 'zod';
|
||||
import { z } from "zod";
|
||||
|
||||
// Tool interfaces
|
||||
export interface Tool {
|
||||
@@ -39,8 +39,8 @@ export type {
|
||||
HassService,
|
||||
HassEvent,
|
||||
HassEntity,
|
||||
HassState
|
||||
} from './hass.js';
|
||||
HassState,
|
||||
} from "./hass.js";
|
||||
|
||||
// Home Assistant interfaces
|
||||
export interface HassAddon {
|
||||
@@ -92,7 +92,7 @@ export interface HacsResponse {
|
||||
export interface AutomationConfig {
|
||||
alias: string;
|
||||
description?: string;
|
||||
mode?: 'single' | 'parallel' | 'queued' | 'restart';
|
||||
mode?: "single" | "parallel" | "queued" | "restart";
|
||||
trigger: any[];
|
||||
condition?: any[];
|
||||
action: any[];
|
||||
@@ -125,7 +125,7 @@ export interface HistoryParams {
|
||||
|
||||
// Scene interfaces
|
||||
export interface SceneParams {
|
||||
action: 'list' | 'activate';
|
||||
action: "list" | "activate";
|
||||
scene_id?: string;
|
||||
}
|
||||
|
||||
@@ -139,30 +139,43 @@ export interface NotifyParams {
|
||||
|
||||
// Automation parameter interfaces
|
||||
export interface AutomationParams {
|
||||
action: 'list' | 'toggle' | 'trigger';
|
||||
action: "list" | "toggle" | "trigger";
|
||||
automation_id?: string;
|
||||
}
|
||||
|
||||
export interface AddonParams {
|
||||
action: 'list' | 'info' | 'install' | 'uninstall' | 'start' | 'stop' | 'restart';
|
||||
action:
|
||||
| "list"
|
||||
| "info"
|
||||
| "install"
|
||||
| "uninstall"
|
||||
| "start"
|
||||
| "stop"
|
||||
| "restart";
|
||||
slug?: string;
|
||||
version?: string;
|
||||
}
|
||||
|
||||
export interface PackageParams {
|
||||
action: 'list' | 'install' | 'uninstall' | 'update';
|
||||
category: 'integration' | 'plugin' | 'theme' | 'python_script' | 'appdaemon' | 'netdaemon';
|
||||
action: "list" | "install" | "uninstall" | "update";
|
||||
category:
|
||||
| "integration"
|
||||
| "plugin"
|
||||
| "theme"
|
||||
| "python_script"
|
||||
| "appdaemon"
|
||||
| "netdaemon";
|
||||
repository?: string;
|
||||
version?: string;
|
||||
}
|
||||
|
||||
export interface AutomationConfigParams {
|
||||
action: 'create' | 'update' | 'delete' | 'duplicate';
|
||||
action: "create" | "update" | "delete" | "duplicate";
|
||||
automation_id?: string;
|
||||
config?: {
|
||||
alias: string;
|
||||
description?: string;
|
||||
mode?: 'single' | 'parallel' | 'queued' | 'restart';
|
||||
mode?: "single" | "parallel" | "queued" | "restart";
|
||||
trigger: any[];
|
||||
condition?: any[];
|
||||
action: any[];
|
||||
|
||||
67
src/mcp/litemcp.ts
Normal file
67
src/mcp/litemcp.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import { EventEmitter } from "events";
|
||||
|
||||
export class LiteMCP extends EventEmitter {
|
||||
private static instance: LiteMCP;
|
||||
private constructor() {
|
||||
super();
|
||||
// Initialize with default configuration
|
||||
this.configure({});
|
||||
}
|
||||
|
||||
public static getInstance(): LiteMCP {
|
||||
if (!LiteMCP.instance) {
|
||||
LiteMCP.instance = new LiteMCP();
|
||||
}
|
||||
return LiteMCP.instance;
|
||||
}
|
||||
|
||||
public configure(config: Record<string, any>): void {
|
||||
// Store configuration
|
||||
this.config = {
|
||||
...this.defaultConfig,
|
||||
...config,
|
||||
};
|
||||
}
|
||||
|
||||
private config: Record<string, any> = {};
|
||||
private defaultConfig = {
|
||||
maxRetries: 3,
|
||||
retryDelay: 1000,
|
||||
timeout: 5000,
|
||||
};
|
||||
|
||||
public async execute(
|
||||
command: string,
|
||||
params: Record<string, any> = {},
|
||||
): Promise<any> {
|
||||
try {
|
||||
// Emit command execution event
|
||||
this.emit("command", { command, params });
|
||||
|
||||
// Execute command logic here
|
||||
const result = await this.processCommand(command, params);
|
||||
|
||||
// Emit success event
|
||||
this.emit("success", { command, params, result });
|
||||
|
||||
return result;
|
||||
} catch (error) {
|
||||
// Emit error event
|
||||
this.emit("error", { command, params, error });
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async processCommand(
|
||||
command: string,
|
||||
params: Record<string, any>,
|
||||
): Promise<any> {
|
||||
// Command processing logic
|
||||
return { command, params, status: "processed" };
|
||||
}
|
||||
|
||||
public async shutdown(): Promise<void> {
|
||||
// Cleanup logic
|
||||
this.removeAllListeners();
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { z } from 'zod';
|
||||
import { DomainSchema } from '../schemas.js';
|
||||
import { z } from "zod";
|
||||
import { DomainSchema } from "../schemas.js";
|
||||
|
||||
export const MCP_SCHEMA = {
|
||||
tools: [
|
||||
@@ -24,14 +24,14 @@ export const MCP_SCHEMA = {
|
||||
"vacuum",
|
||||
"scene",
|
||||
"script",
|
||||
"camera"
|
||||
]
|
||||
"camera",
|
||||
],
|
||||
},
|
||||
area: { type: "string" },
|
||||
floor: { type: "string" }
|
||||
floor: { type: "string" },
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
required: []
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "control",
|
||||
@@ -53,8 +53,8 @@ export const MCP_SCHEMA = {
|
||||
"set_temperature",
|
||||
"set_hvac_mode",
|
||||
"set_fan_mode",
|
||||
"set_humidity"
|
||||
]
|
||||
"set_humidity",
|
||||
],
|
||||
},
|
||||
entity_id: { type: "string" },
|
||||
state: { type: "string" },
|
||||
@@ -64,7 +64,7 @@ export const MCP_SCHEMA = {
|
||||
type: "array",
|
||||
items: { type: "number" },
|
||||
minItems: 3,
|
||||
maxItems: 3
|
||||
maxItems: 3,
|
||||
},
|
||||
position: { type: "number" },
|
||||
tilt_position: { type: "number" },
|
||||
@@ -73,10 +73,10 @@ export const MCP_SCHEMA = {
|
||||
target_temp_low: { type: "number" },
|
||||
hvac_mode: { type: "string" },
|
||||
fan_mode: { type: "string" },
|
||||
humidity: { type: "number" }
|
||||
humidity: { type: "number" },
|
||||
},
|
||||
required: ["command", "entity_id"],
|
||||
},
|
||||
required: ["command", "entity_id"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "subscribe_events",
|
||||
@@ -86,13 +86,13 @@ export const MCP_SCHEMA = {
|
||||
properties: {
|
||||
events: {
|
||||
type: "array",
|
||||
items: { type: "string" }
|
||||
items: { type: "string" },
|
||||
},
|
||||
entity_id: { type: "string" },
|
||||
domain: { type: "string" }
|
||||
domain: { type: "string" },
|
||||
},
|
||||
required: [],
|
||||
},
|
||||
required: []
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "get_sse_stats",
|
||||
@@ -100,8 +100,8 @@ export const MCP_SCHEMA = {
|
||||
parameters: {
|
||||
type: "object",
|
||||
properties: {},
|
||||
required: []
|
||||
}
|
||||
required: [],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "automation_config",
|
||||
@@ -111,7 +111,7 @@ export const MCP_SCHEMA = {
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: ["list", "toggle", "trigger", "create", "update", "delete"]
|
||||
enum: ["list", "toggle", "trigger", "create", "update", "delete"],
|
||||
},
|
||||
automation_id: { type: "string" },
|
||||
config: {
|
||||
@@ -121,17 +121,17 @@ export const MCP_SCHEMA = {
|
||||
description: { type: "string" },
|
||||
mode: {
|
||||
type: "string",
|
||||
enum: ["single", "parallel", "queued", "restart"]
|
||||
enum: ["single", "parallel", "queued", "restart"],
|
||||
},
|
||||
trigger: { type: "array" },
|
||||
condition: { type: "array" },
|
||||
action: { type: "array" }
|
||||
action: { type: "array" },
|
||||
},
|
||||
required: ["alias", "trigger", "action"]
|
||||
}
|
||||
required: ["alias", "trigger", "action"],
|
||||
},
|
||||
},
|
||||
required: ["action"],
|
||||
},
|
||||
required: ["action"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "addon_management",
|
||||
@@ -141,13 +141,21 @@ export const MCP_SCHEMA = {
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: ["list", "info", "install", "uninstall", "start", "stop", "restart"]
|
||||
enum: [
|
||||
"list",
|
||||
"info",
|
||||
"install",
|
||||
"uninstall",
|
||||
"start",
|
||||
"stop",
|
||||
"restart",
|
||||
],
|
||||
},
|
||||
slug: { type: "string" },
|
||||
version: { type: "string" }
|
||||
version: { type: "string" },
|
||||
},
|
||||
required: ["action"],
|
||||
},
|
||||
required: ["action"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "package_management",
|
||||
@@ -157,17 +165,24 @@ export const MCP_SCHEMA = {
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: ["list", "install", "uninstall", "update"]
|
||||
enum: ["list", "install", "uninstall", "update"],
|
||||
},
|
||||
category: {
|
||||
type: "string",
|
||||
enum: ["integration", "plugin", "theme", "python_script", "appdaemon", "netdaemon"]
|
||||
enum: [
|
||||
"integration",
|
||||
"plugin",
|
||||
"theme",
|
||||
"python_script",
|
||||
"appdaemon",
|
||||
"netdaemon",
|
||||
],
|
||||
},
|
||||
repository: { type: "string" },
|
||||
version: { type: "string" }
|
||||
version: { type: "string" },
|
||||
},
|
||||
required: ["action", "category"],
|
||||
},
|
||||
required: ["action", "category"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "scene_control",
|
||||
@@ -177,12 +192,12 @@ export const MCP_SCHEMA = {
|
||||
properties: {
|
||||
action: {
|
||||
type: "string",
|
||||
enum: ["list", "activate"]
|
||||
enum: ["list", "activate"],
|
||||
},
|
||||
scene_id: { type: "string" }
|
||||
scene_id: { type: "string" },
|
||||
},
|
||||
required: ["action"],
|
||||
},
|
||||
required: ["action"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "notify",
|
||||
@@ -195,11 +210,11 @@ export const MCP_SCHEMA = {
|
||||
target: { type: "string" },
|
||||
data: {
|
||||
type: "object",
|
||||
additionalProperties: true
|
||||
}
|
||||
additionalProperties: true,
|
||||
},
|
||||
},
|
||||
required: ["message"],
|
||||
},
|
||||
required: ["message"]
|
||||
}
|
||||
},
|
||||
{
|
||||
name: "history",
|
||||
@@ -211,147 +226,173 @@ export const MCP_SCHEMA = {
|
||||
start_time: { type: "string" },
|
||||
end_time: { type: "string" },
|
||||
minimal_response: { type: "boolean" },
|
||||
significant_changes_only: { type: "boolean" }
|
||||
significant_changes_only: { type: "boolean" },
|
||||
},
|
||||
required: ["entity_id"],
|
||||
},
|
||||
},
|
||||
required: ["entity_id"]
|
||||
}
|
||||
}
|
||||
],
|
||||
prompts: [
|
||||
{
|
||||
name: "claude",
|
||||
description: "Claude-specific prompt template for home automation control",
|
||||
description:
|
||||
"Claude-specific prompt template for home automation control",
|
||||
template: {
|
||||
system: "You are Claude, an AI assistant specialized in home automation control through natural language. Your role is to interpret user commands and translate them into specific device control actions. Always maintain context awareness and consider user preferences and patterns. Provide clear, concise responses and suggest relevant follow-up actions.",
|
||||
system:
|
||||
"You are Claude, an AI assistant specialized in home automation control through natural language. Your role is to interpret user commands and translate them into specific device control actions. Always maintain context awareness and consider user preferences and patterns. Provide clear, concise responses and suggest relevant follow-up actions.",
|
||||
user: "Control the {device_name} in the {location} by {action} with parameters: {parameters}. Current context: {context}",
|
||||
examples: [
|
||||
{
|
||||
user: "Turn on the living room lights",
|
||||
assistant: "I'll turn on the lights in the living room. Would you like me to set a specific brightness level?"
|
||||
assistant:
|
||||
"I'll turn on the lights in the living room. Would you like me to set a specific brightness level?",
|
||||
},
|
||||
{
|
||||
user: "Set the temperature to 72 degrees",
|
||||
assistant: "I'll set the temperature to 72°F. I'll monitor the temperature and let you know when it reaches the target."
|
||||
assistant:
|
||||
"I'll set the temperature to 72°F. I'll monitor the temperature and let you know when it reaches the target.",
|
||||
},
|
||||
{
|
||||
user: "Open the kitchen blinds halfway",
|
||||
assistant: "Setting the kitchen blinds to 50% open position. Would you like me to adjust the slat angle as well?"
|
||||
assistant:
|
||||
"Setting the kitchen blinds to 50% open position. Would you like me to adjust the slat angle as well?",
|
||||
},
|
||||
{
|
||||
user: "Start the coffee maker",
|
||||
assistant: "I'll start the coffee maker in the kitchen. Based on your usual preferences, I'll use the strong brew setting. Would you like me to adjust any settings?"
|
||||
assistant:
|
||||
"I'll start the coffee maker in the kitchen. Based on your usual preferences, I'll use the strong brew setting. Would you like me to adjust any settings?",
|
||||
},
|
||||
{
|
||||
user: "Set up the house for bedtime",
|
||||
assistant: "I'll activate the bedtime routine: dimming hallway lights to 20%, turning off all other lights, setting temperature to your preferred sleeping temperature of 68°F, locking all doors, and arming the security system."
|
||||
assistant:
|
||||
"I'll activate the bedtime routine: dimming hallway lights to 20%, turning off all other lights, setting temperature to your preferred sleeping temperature of 68°F, locking all doors, and arming the security system.",
|
||||
},
|
||||
{
|
||||
user: "Play some music in the bedroom",
|
||||
assistant: "I'll start playing music on the bedroom speaker. Based on this time of day and your preferences, I'll start your 'Evening Relaxation' playlist at 40% volume. Would you like to adjust any of these settings?"
|
||||
assistant:
|
||||
"I'll start playing music on the bedroom speaker. Based on this time of day and your preferences, I'll start your 'Evening Relaxation' playlist at 40% volume. Would you like to adjust any of these settings?",
|
||||
},
|
||||
{
|
||||
user: "Check if I left any windows open",
|
||||
assistant: "I'll check all window sensors. The kitchen window and guest bedroom window are currently open. Would you like me to notify you if any windows remain open after sunset?"
|
||||
}
|
||||
]
|
||||
}
|
||||
assistant:
|
||||
"I'll check all window sensors. The kitchen window and guest bedroom window are currently open. Would you like me to notify you if any windows remain open after sunset?",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "gpt4",
|
||||
description: "GPT-4 specific prompt template for precise home automation control",
|
||||
description:
|
||||
"GPT-4 specific prompt template for precise home automation control",
|
||||
template: {
|
||||
system: "You are a home automation assistant powered by GPT-4. Focus on precise command interpretation and execution. Maintain high accuracy in device control and parameter settings. Provide feedback on action success and system state changes.",
|
||||
system:
|
||||
"You are a home automation assistant powered by GPT-4. Focus on precise command interpretation and execution. Maintain high accuracy in device control and parameter settings. Provide feedback on action success and system state changes.",
|
||||
user: "Command: {action} {device_name} in {location}\nParameters: {parameters}\nContext: {context}",
|
||||
examples: [
|
||||
{
|
||||
user: "Dim the bedroom lights to 50%",
|
||||
assistant: "Setting bedroom light brightness to 50%. The change has been applied successfully."
|
||||
assistant:
|
||||
"Setting bedroom light brightness to 50%. The change has been applied successfully.",
|
||||
},
|
||||
{
|
||||
user: "Start the evening routine",
|
||||
assistant: "Initiating evening routine: dimming lights, adjusting temperature, and enabling security system."
|
||||
assistant:
|
||||
"Initiating evening routine: dimming lights, adjusting temperature, and enabling security system.",
|
||||
},
|
||||
{
|
||||
user: "Set living room AC to 74 degrees and high fan",
|
||||
assistant: "Configuring living room climate control: temperature set to 74°F, fan mode set to high. Current temperature is 78°F, estimated time to target: 15 minutes."
|
||||
assistant:
|
||||
"Configuring living room climate control: temperature set to 74°F, fan mode set to high. Current temperature is 78°F, estimated time to target: 15 minutes.",
|
||||
},
|
||||
{
|
||||
user: "Lock all doors and windows",
|
||||
assistant: "Security check initiated. All 4 doors locked successfully. 6/7 windows locked. Alert: Kitchen window cannot be locked remotely - manual intervention required."
|
||||
assistant:
|
||||
"Security check initiated. All 4 doors locked successfully. 6/7 windows locked. Alert: Kitchen window cannot be locked remotely - manual intervention required.",
|
||||
},
|
||||
{
|
||||
user: "Turn on outdoor lights at sunset",
|
||||
assistant: "Scheduled task created. Outdoor lights (porch, pathway, garage) will activate at sunset (7:45 PM today). Current light sensor reading: 24,000 lux."
|
||||
assistant:
|
||||
"Scheduled task created. Outdoor lights (porch, pathway, garage) will activate at sunset (7:45 PM today). Current light sensor reading: 24,000 lux.",
|
||||
},
|
||||
{
|
||||
user: "Set up the media room for movie night",
|
||||
assistant: "Executing media room configuration: TV power on, input set to HDMI 2, surround sound activated at 40%, lights dimmed to 10%, blackout blinds closed. Room temperature adjusted to 72°F."
|
||||
assistant:
|
||||
"Executing media room configuration: TV power on, input set to HDMI 2, surround sound activated at 40%, lights dimmed to 10%, blackout blinds closed. Room temperature adjusted to 72°F.",
|
||||
},
|
||||
{
|
||||
user: "Show me energy usage for today",
|
||||
assistant: "Retrieving energy data: Total usage today: 24.5 kWh. Peak consumption: 3.2 kWh (2-3 PM). Major consumers: HVAC (45%), Water Heater (25%), Kitchen Appliances (15%)."
|
||||
}
|
||||
]
|
||||
}
|
||||
assistant:
|
||||
"Retrieving energy data: Total usage today: 24.5 kWh. Peak consumption: 3.2 kWh (2-3 PM). Major consumers: HVAC (45%), Water Heater (25%), Kitchen Appliances (15%).",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "custom",
|
||||
description: "Customizable prompt template for home automation control",
|
||||
template: {
|
||||
system: "Custom home automation assistant configuration. Adapt to user preferences and patterns. Learn from interactions and optimize responses. Provide detailed feedback and suggestions.",
|
||||
system:
|
||||
"Custom home automation assistant configuration. Adapt to user preferences and patterns. Learn from interactions and optimize responses. Provide detailed feedback and suggestions.",
|
||||
user: "Action requested: {action}\nTarget device: {device_name}\nLocation: {location}\nParameters: {parameters}\nCurrent context: {context}",
|
||||
examples: [
|
||||
{
|
||||
user: "Make it cooler in here",
|
||||
assistant: "Based on your preferences, I'll lower the temperature by 2 degrees. Current temperature is 74°F, adjusting to 72°F."
|
||||
assistant:
|
||||
"Based on your preferences, I'll lower the temperature by 2 degrees. Current temperature is 74°F, adjusting to 72°F.",
|
||||
},
|
||||
{
|
||||
user: "Set up movie mode",
|
||||
assistant: "Activating movie mode: dimming lights to 20%, closing blinds, setting TV input to HDMI 1, and adjusting sound system."
|
||||
assistant:
|
||||
"Activating movie mode: dimming lights to 20%, closing blinds, setting TV input to HDMI 1, and adjusting sound system.",
|
||||
},
|
||||
{
|
||||
user: "I'm heading to bed",
|
||||
assistant: "Initiating personalized bedtime sequence: Setting master bedroom temperature to 68°F, activating white noise machine at volume level 3, dimming hallway lights to 15%, turning off all other lights, closing all blinds, and enabling sleep mode on your phone charger."
|
||||
assistant:
|
||||
"Initiating personalized bedtime sequence: Setting master bedroom temperature to 68°F, activating white noise machine at volume level 3, dimming hallway lights to 15%, turning off all other lights, closing all blinds, and enabling sleep mode on your phone charger.",
|
||||
},
|
||||
{
|
||||
user: "Start my morning routine",
|
||||
assistant: "Good morning! Executing your weekday morning routine: Gradually increasing bedroom lights (now at 30%), starting coffee maker with your weekday strength setting, setting bathroom heater to 72°F, and pulling up today's weather and calendar on your kitchen display."
|
||||
assistant:
|
||||
"Good morning! Executing your weekday morning routine: Gradually increasing bedroom lights (now at 30%), starting coffee maker with your weekday strength setting, setting bathroom heater to 72°F, and pulling up today's weather and calendar on your kitchen display.",
|
||||
},
|
||||
{
|
||||
user: "I'm working from home today",
|
||||
assistant: "Activating work-from-home environment: Setting home office lights to your preferred 5000K color temperature at 80% brightness, adjusting HVAC for daytime occupancy, enabling do-not-disturb on doorbell, and configuring automated breaks reminder every 2 hours."
|
||||
assistant:
|
||||
"Activating work-from-home environment: Setting home office lights to your preferred 5000K color temperature at 80% brightness, adjusting HVAC for daytime occupancy, enabling do-not-disturb on doorbell, and configuring automated breaks reminder every 2 hours.",
|
||||
},
|
||||
{
|
||||
user: "We're having guests over",
|
||||
assistant: "Preparing home for guests: Adjusting living room and dining room temperature to 72°F, setting accent lighting to warm white (2700K), activating air purifier in high mode, enabling guest WiFi network, and preheating the oven as per your usual entertaining preferences."
|
||||
assistant:
|
||||
"Preparing home for guests: Adjusting living room and dining room temperature to 72°F, setting accent lighting to warm white (2700K), activating air purifier in high mode, enabling guest WiFi network, and preheating the oven as per your usual entertaining preferences.",
|
||||
},
|
||||
{
|
||||
user: "Monitor the baby's room",
|
||||
assistant: "Activating enhanced nursery monitoring: Setting up temperature alerts (68-72°F range), enabling sound monitoring with smart notification threshold, maintaining humidity at 50%, and activating the smart night light with color changing based on sleep schedule. Current readings: 70°F, 48% humidity, sound level: minimal."
|
||||
}
|
||||
]
|
||||
assistant:
|
||||
"Activating enhanced nursery monitoring: Setting up temperature alerts (68-72°F range), enabling sound monitoring with smart notification threshold, maintaining humidity at 50%, and activating the smart night light with color changing based on sleep schedule. Current readings: 70°F, 48% humidity, sound level: minimal.",
|
||||
},
|
||||
],
|
||||
},
|
||||
variables: {
|
||||
device_name: "string",
|
||||
location: "string",
|
||||
action: "string",
|
||||
parameters: "object",
|
||||
context: "object"
|
||||
}
|
||||
}
|
||||
context: "object",
|
||||
},
|
||||
},
|
||||
],
|
||||
resources: [
|
||||
{
|
||||
name: "Home Assistant API",
|
||||
url: "https://developers.home-assistant.io/docs/api/rest/"
|
||||
url: "https://developers.home-assistant.io/docs/api/rest/",
|
||||
},
|
||||
{
|
||||
name: "Home Assistant WebSocket API",
|
||||
url: "https://developers.home-assistant.io/docs/api/websocket"
|
||||
url: "https://developers.home-assistant.io/docs/api/websocket",
|
||||
},
|
||||
{
|
||||
name: "HACS Documentation",
|
||||
url: "https://hacs.xyz"
|
||||
}
|
||||
]
|
||||
url: "https://hacs.xyz",
|
||||
},
|
||||
],
|
||||
};
|
||||
203
src/middleware/__tests__/security.middleware.test.ts
Normal file
203
src/middleware/__tests__/security.middleware.test.ts
Normal file
@@ -0,0 +1,203 @@
|
||||
import { Request, Response } from "express";
|
||||
import { validateRequest, sanitizeInput, errorHandler } from "../index";
|
||||
import { TokenManager } from "../../security/index";
|
||||
import { jest } from "@jest/globals";
|
||||
|
||||
const TEST_SECRET = "test-secret-that-is-long-enough-for-testing-purposes";
|
||||
|
||||
describe("Security Middleware", () => {
|
||||
let mockRequest: Partial<Request>;
|
||||
let mockResponse: Partial<Response>;
|
||||
let nextFunction: jest.Mock;
|
||||
|
||||
beforeEach(() => {
|
||||
process.env.JWT_SECRET = TEST_SECRET;
|
||||
mockRequest = {
|
||||
method: "POST",
|
||||
headers: {},
|
||||
body: {},
|
||||
ip: "127.0.0.1",
|
||||
};
|
||||
|
||||
const mockJson = jest.fn().mockReturnThis();
|
||||
const mockStatus = jest.fn().mockReturnThis();
|
||||
const mockSetHeader = jest.fn().mockReturnThis();
|
||||
const mockRemoveHeader = jest.fn().mockReturnThis();
|
||||
|
||||
mockResponse = {
|
||||
status: mockStatus as any,
|
||||
json: mockJson as any,
|
||||
setHeader: mockSetHeader as any,
|
||||
removeHeader: mockRemoveHeader as any,
|
||||
};
|
||||
nextFunction = jest.fn();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
delete process.env.JWT_SECRET;
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
describe("Request Validation", () => {
|
||||
it("should pass valid requests", () => {
|
||||
mockRequest.headers = {
|
||||
authorization: "Bearer valid-token",
|
||||
"content-type": "application/json",
|
||||
};
|
||||
jest
|
||||
.spyOn(TokenManager, "validateToken")
|
||||
.mockReturnValue({ valid: true });
|
||||
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should reject requests without authorization header", () => {
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
message: "Unauthorized",
|
||||
error: "Missing or invalid authorization header",
|
||||
timestamp: expect.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it("should reject requests with invalid authorization format", () => {
|
||||
mockRequest.headers = {
|
||||
authorization: "invalid-format",
|
||||
"content-type": "application/json",
|
||||
};
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
message: "Unauthorized",
|
||||
error: "Missing or invalid authorization header",
|
||||
timestamp: expect.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it("should reject oversized requests", () => {
|
||||
mockRequest.headers = {
|
||||
authorization: "Bearer valid-token",
|
||||
"content-type": "application/json",
|
||||
"content-length": "1048577", // 1MB + 1 byte
|
||||
};
|
||||
validateRequest(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(413);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
message: "Payload Too Large",
|
||||
error: "Request body must not exceed 1048576 bytes",
|
||||
timestamp: expect.any(String),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("Input Sanitization", () => {
|
||||
it("should sanitize HTML in request body", () => {
|
||||
mockRequest.body = {
|
||||
text: 'Test <script>alert("xss")</script>',
|
||||
nested: {
|
||||
html: '<img src="x" onerror="alert(1)">World',
|
||||
},
|
||||
};
|
||||
sanitizeInput(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body.text).toBe("Test ");
|
||||
expect(mockRequest.body.nested.html).toBe("World");
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should handle non-object bodies", () => {
|
||||
mockRequest.body = "<p>text</p>";
|
||||
sanitizeInput(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body).toBe("text");
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("should preserve non-string values", () => {
|
||||
mockRequest.body = {
|
||||
number: 123,
|
||||
boolean: true,
|
||||
array: [1, 2, 3],
|
||||
nested: { value: 456 },
|
||||
};
|
||||
sanitizeInput(
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body).toEqual({
|
||||
number: 123,
|
||||
boolean: true,
|
||||
array: [1, 2, 3],
|
||||
nested: { value: 456 },
|
||||
});
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Error Handler", () => {
|
||||
it("should handle errors in production mode", () => {
|
||||
process.env.NODE_ENV = "production";
|
||||
const error = new Error("Test error");
|
||||
errorHandler(
|
||||
error,
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
message: "Internal Server Error",
|
||||
error: "An unexpected error occurred",
|
||||
timestamp: expect.any(String),
|
||||
});
|
||||
});
|
||||
|
||||
it("should include error details in development mode", () => {
|
||||
process.env.NODE_ENV = "development";
|
||||
const error = new Error("Test error");
|
||||
errorHandler(
|
||||
error,
|
||||
mockRequest as Request,
|
||||
mockResponse as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(500);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith({
|
||||
success: false,
|
||||
message: "Internal Server Error",
|
||||
error: "Test error",
|
||||
stack: expect.any(String),
|
||||
timestamp: expect.any(String),
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,130 +1,285 @@
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { HASS_CONFIG, RATE_LIMIT_CONFIG } from '../config/index.js';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import { Request, Response, NextFunction } from "express";
|
||||
import { HASS_CONFIG, RATE_LIMIT_CONFIG } from "../config/index.js";
|
||||
import rateLimit from "express-rate-limit";
|
||||
import { TokenManager } from "../security/index.js";
|
||||
import sanitizeHtml from "sanitize-html";
|
||||
import helmet from "helmet";
|
||||
import { SECURITY_CONFIG } from "../config/security.config.js";
|
||||
|
||||
// Rate limiter middleware
|
||||
// Rate limiter middleware with enhanced configuration
|
||||
export const rateLimiter = rateLimit({
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
max: RATE_LIMIT_CONFIG.REGULAR,
|
||||
windowMs: SECURITY_CONFIG.RATE_LIMIT_WINDOW,
|
||||
max: SECURITY_CONFIG.RATE_LIMIT_MAX_REQUESTS,
|
||||
message: {
|
||||
success: false,
|
||||
message: 'Too many requests, please try again later.',
|
||||
reset_time: new Date(Date.now() + 60 * 1000).toISOString()
|
||||
}
|
||||
message: "Too Many Requests",
|
||||
error: "Rate limit exceeded. Please try again later.",
|
||||
timestamp: new Date().toISOString(),
|
||||
},
|
||||
});
|
||||
|
||||
// WebSocket rate limiter middleware
|
||||
// WebSocket rate limiter middleware with enhanced configuration
|
||||
export const wsRateLimiter = rateLimit({
|
||||
windowMs: 60 * 1000, // 1 minute
|
||||
max: RATE_LIMIT_CONFIG.WEBSOCKET,
|
||||
standardHeaders: true,
|
||||
legacyHeaders: false,
|
||||
message: {
|
||||
success: false,
|
||||
message: 'Too many WebSocket connections, please try again later.',
|
||||
reset_time: new Date(Date.now() + 60 * 1000).toISOString()
|
||||
}
|
||||
message: "Too many WebSocket connections, please try again later.",
|
||||
reset_time: new Date(Date.now() + 60 * 1000).toISOString(),
|
||||
},
|
||||
skipSuccessfulRequests: false,
|
||||
keyGenerator: (req) => req.ip || req.socket.remoteAddress || "unknown",
|
||||
});
|
||||
|
||||
// Security headers middleware
|
||||
export const securityHeaders = (_req: Request, res: Response, next: NextFunction) => {
|
||||
res.setHeader('X-Content-Type-Options', 'nosniff');
|
||||
res.setHeader('X-Frame-Options', 'DENY');
|
||||
res.setHeader('X-XSS-Protection', '1; mode=block');
|
||||
res.setHeader('Strict-Transport-Security', 'max-age=31536000; includeSubDomains');
|
||||
// Authentication middleware with enhanced security
|
||||
export const authenticate = (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
) => {
|
||||
const authHeader = req.headers.authorization;
|
||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: "Unauthorized",
|
||||
error: "Missing or invalid authorization header",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
const token = authHeader.replace("Bearer ", "");
|
||||
const clientIp = req.ip || req.socket.remoteAddress || "";
|
||||
|
||||
const validationResult = TokenManager.validateToken(token, clientIp);
|
||||
|
||||
if (!validationResult.valid) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: "Unauthorized",
|
||||
error: validationResult.error || "Invalid token",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
// Request validation middleware
|
||||
export const validateRequest = (req: Request, res: Response, next: NextFunction) => {
|
||||
// Validate content type for POST/PUT/PATCH requests
|
||||
if (['POST', 'PUT', 'PATCH'].includes(req.method) && !req.is('application/json')) {
|
||||
// Enhanced security headers middleware using helmet
|
||||
const helmetMiddleware = helmet({
|
||||
contentSecurityPolicy: {
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", "'unsafe-inline'"],
|
||||
styleSrc: ["'self'", "'unsafe-inline'"],
|
||||
imgSrc: ["'self'", "data:", "https:"],
|
||||
connectSrc: ["'self'", "wss:", "https:"],
|
||||
frameSrc: ["'none'"],
|
||||
objectSrc: ["'none'"],
|
||||
baseUri: ["'self'"],
|
||||
formAction: ["'self'"],
|
||||
frameAncestors: ["'none'"],
|
||||
},
|
||||
},
|
||||
crossOriginEmbedderPolicy: true,
|
||||
crossOriginOpenerPolicy: { policy: "same-origin" },
|
||||
crossOriginResourcePolicy: { policy: "same-origin" },
|
||||
dnsPrefetchControl: { allow: false },
|
||||
frameguard: { action: "deny" },
|
||||
hidePoweredBy: true,
|
||||
hsts: {
|
||||
maxAge: 31536000,
|
||||
includeSubDomains: true,
|
||||
preload: true,
|
||||
},
|
||||
ieNoOpen: true,
|
||||
noSniff: true,
|
||||
originAgentCluster: true,
|
||||
permittedCrossDomainPolicies: { permittedPolicies: "none" },
|
||||
referrerPolicy: { policy: "strict-origin-when-cross-origin" },
|
||||
xssFilter: true,
|
||||
});
|
||||
|
||||
// Wrapper for helmet middleware to handle mock responses in tests
|
||||
export const securityHeaders = (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): void => {
|
||||
// Basic security headers
|
||||
res.setHeader("X-Content-Type-Options", "nosniff");
|
||||
res.setHeader("X-Frame-Options", "DENY");
|
||||
res.setHeader("X-XSS-Protection", "1; mode=block");
|
||||
res.setHeader("Referrer-Policy", "strict-origin-when-cross-origin");
|
||||
res.setHeader("X-Permitted-Cross-Domain-Policies", "none");
|
||||
res.setHeader("X-Download-Options", "noopen");
|
||||
|
||||
// Content Security Policy
|
||||
res.setHeader(
|
||||
"Content-Security-Policy",
|
||||
[
|
||||
"default-src 'self'",
|
||||
"script-src 'self'",
|
||||
"style-src 'self'",
|
||||
"img-src 'self'",
|
||||
"font-src 'self'",
|
||||
"connect-src 'self'",
|
||||
"media-src 'self'",
|
||||
"object-src 'none'",
|
||||
"frame-ancestors 'none'",
|
||||
"base-uri 'self'",
|
||||
"form-action 'self'",
|
||||
].join("; "),
|
||||
);
|
||||
|
||||
// HSTS (only in production)
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
res.setHeader(
|
||||
"Strict-Transport-Security",
|
||||
"max-age=31536000; includeSubDomains; preload",
|
||||
);
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
/**
|
||||
* Validates incoming requests for proper authentication and content type
|
||||
*/
|
||||
export const validateRequest = (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): Response | void => {
|
||||
// Skip validation for health and MCP schema endpoints
|
||||
if (req.path === "/health" || req.path === "/mcp") {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Validate content type for non-GET requests
|
||||
if (["POST", "PUT", "PATCH"].includes(req.method)) {
|
||||
const contentType = req.headers["content-type"] || "";
|
||||
if (!contentType.toLowerCase().includes("application/json")) {
|
||||
return res.status(415).json({
|
||||
success: false,
|
||||
message: 'Content-Type must be application/json'
|
||||
message: "Unsupported Media Type",
|
||||
error: "Content-Type must be application/json",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
// Validate request body size
|
||||
const contentLength = parseInt(req.headers['content-length'] || '0', 10);
|
||||
if (contentLength > 1024 * 1024) { // 1MB limit
|
||||
return res.status(413).json({
|
||||
success: false,
|
||||
message: 'Request body too large'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
// Input sanitization middleware
|
||||
export const sanitizeInput = (req: Request, _res: Response, next: NextFunction) => {
|
||||
if (req.body) {
|
||||
// Recursively sanitize object
|
||||
const sanitizeObject = (obj: any): any => {
|
||||
if (typeof obj !== 'object' || obj === null) {
|
||||
return obj;
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
return obj.map(item => sanitizeObject(item));
|
||||
}
|
||||
|
||||
const sanitized: any = {};
|
||||
for (const [key, value] of Object.entries(obj)) {
|
||||
// Remove any potentially dangerous characters from keys
|
||||
const sanitizedKey = key.replace(/[<>]/g, '');
|
||||
sanitized[sanitizedKey] = sanitizeObject(value);
|
||||
}
|
||||
|
||||
return sanitized;
|
||||
};
|
||||
|
||||
req.body = sanitizeObject(req.body);
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
|
||||
// Authentication middleware
|
||||
export const authenticate = (req: Request, res: Response, next: NextFunction) => {
|
||||
const token = req.headers.authorization?.replace('Bearer ', '');
|
||||
|
||||
if (!token || token !== HASS_CONFIG.TOKEN) {
|
||||
// Validate authorization header
|
||||
const authHeader = req.headers.authorization;
|
||||
if (!authHeader || !authHeader.startsWith("Bearer ")) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized",
|
||||
error: "Missing or invalid authorization header",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
};
|
||||
// Validate token
|
||||
const token = authHeader.replace("Bearer ", "");
|
||||
const validationResult = TokenManager.validateToken(token, req.ip);
|
||||
if (!validationResult.valid) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: "Unauthorized",
|
||||
error: validationResult.error || "Invalid token",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
// Error handling middleware
|
||||
export const errorHandler = (err: Error, _req: Request, res: Response, _next: NextFunction) => {
|
||||
console.error('Error:', err);
|
||||
|
||||
// Handle specific error types
|
||||
if (err.name === 'ValidationError') {
|
||||
// Validate request body structure
|
||||
if (req.method !== "GET" && req.body) {
|
||||
if (typeof req.body !== "object" || Array.isArray(req.body)) {
|
||||
return res.status(400).json({
|
||||
success: false,
|
||||
message: 'Validation error',
|
||||
details: err.message
|
||||
message: "Bad Request",
|
||||
error: "Invalid request body structure",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
if (err.name === 'UnauthorizedError') {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized',
|
||||
details: err.message
|
||||
});
|
||||
}
|
||||
|
||||
// Default error response
|
||||
res.status(500).json({
|
||||
next();
|
||||
};
|
||||
|
||||
/**
|
||||
* Sanitizes input data to prevent XSS attacks
|
||||
*/
|
||||
export const sanitizeInput = (
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): void => {
|
||||
if (req.body && typeof req.body === "object" && !Array.isArray(req.body)) {
|
||||
const sanitizeValue = (value: unknown): unknown => {
|
||||
if (typeof value === "string") {
|
||||
let sanitized = value;
|
||||
// Remove script tags and their content
|
||||
sanitized = sanitized.replace(
|
||||
/<script\b[^<]*(?:(?!<\/script>)<[^<]*)*<\/script>/gi,
|
||||
"",
|
||||
);
|
||||
// Remove style tags and their content
|
||||
sanitized = sanitized.replace(
|
||||
/<style\b[^<]*(?:(?!<\/style>)<[^<]*)*<\/style>/gi,
|
||||
"",
|
||||
);
|
||||
// Remove remaining HTML tags
|
||||
sanitized = sanitized.replace(/<[^>]+>/g, "");
|
||||
// Remove javascript: protocol
|
||||
sanitized = sanitized.replace(/javascript:/gi, "");
|
||||
// Remove event handlers
|
||||
sanitized = sanitized.replace(
|
||||
/on\w+\s*=\s*(?:".*?"|'.*?'|[^"'>\s]+)/gi,
|
||||
"",
|
||||
);
|
||||
// Trim whitespace
|
||||
return sanitized.trim();
|
||||
} else if (typeof value === "object" && value !== null) {
|
||||
const result: Record<string, unknown> = {};
|
||||
Object.entries(value as Record<string, unknown>).forEach(
|
||||
([key, val]) => {
|
||||
result[key] = sanitizeValue(val);
|
||||
},
|
||||
);
|
||||
return result;
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
req.body = sanitizeValue(req.body) as Record<string, unknown>;
|
||||
}
|
||||
next();
|
||||
};
|
||||
|
||||
/**
|
||||
* Handles errors in a consistent way
|
||||
*/
|
||||
export const errorHandler = (
|
||||
err: Error,
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): Response => {
|
||||
const isDevelopment = process.env.NODE_ENV === "development";
|
||||
const response: Record<string, unknown> = {
|
||||
success: false,
|
||||
message: 'Internal server error',
|
||||
details: process.env.NODE_ENV === 'development' ? err.message : undefined
|
||||
});
|
||||
message: "Internal Server Error",
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
if (isDevelopment) {
|
||||
response.error = err.message;
|
||||
response.stack = err.stack;
|
||||
}
|
||||
|
||||
return res.status(500).json(response);
|
||||
};
|
||||
|
||||
// Export all middleware
|
||||
@@ -135,5 +290,5 @@ export const middleware = {
|
||||
validateRequest,
|
||||
sanitizeInput,
|
||||
authenticate,
|
||||
errorHandler
|
||||
errorHandler,
|
||||
};
|
||||
@@ -7,9 +7,9 @@
|
||||
* @module logging-middleware
|
||||
*/
|
||||
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { logger } from '../utils/logger.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { Request, Response, NextFunction } from "express";
|
||||
import { logger } from "../utils/logger.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
/**
|
||||
* Interface for extended request object with timing information
|
||||
@@ -37,9 +37,9 @@ const getResponseTime = (startTime: number): number => {
|
||||
*/
|
||||
const getClientIp = (req: Request): string => {
|
||||
return (
|
||||
(req.headers['x-forwarded-for'] as string)?.split(',')[0] ||
|
||||
(req.headers["x-forwarded-for"] as string)?.split(",")[0] ||
|
||||
req.socket.remoteAddress ||
|
||||
'unknown'
|
||||
"unknown"
|
||||
);
|
||||
};
|
||||
|
||||
@@ -59,7 +59,11 @@ const formatRequestLog = (req: TimedRequest): string => {
|
||||
* @param time - Response time in milliseconds
|
||||
* @returns Formatted log message
|
||||
*/
|
||||
const formatResponseLog = (req: TimedRequest, res: Response, time: number): string => {
|
||||
const formatResponseLog = (
|
||||
req: TimedRequest,
|
||||
res: Response,
|
||||
time: number,
|
||||
): string => {
|
||||
return `${req.method} ${req.originalUrl} - ${res.statusCode} - ${time.toFixed(2)}ms`;
|
||||
};
|
||||
|
||||
@@ -67,7 +71,11 @@ const formatResponseLog = (req: TimedRequest, res: Response, time: number): stri
|
||||
* Request logging middleware
|
||||
* Logs information about incoming requests and their responses
|
||||
*/
|
||||
export const requestLogger = (req: TimedRequest, res: Response, next: NextFunction): void => {
|
||||
export const requestLogger = (
|
||||
req: TimedRequest,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): void => {
|
||||
if (!APP_CONFIG.LOGGING.LOG_REQUESTS) {
|
||||
next();
|
||||
return;
|
||||
@@ -80,9 +88,9 @@ export const requestLogger = (req: TimedRequest, res: Response, next: NextFuncti
|
||||
logger.http(formatRequestLog(req));
|
||||
|
||||
// Log response
|
||||
res.on('finish', () => {
|
||||
res.on("finish", () => {
|
||||
const responseTime = Date.now() - (req.startTime || 0);
|
||||
const logLevel = res.statusCode >= 400 ? 'warn' : 'http';
|
||||
const logLevel = res.statusCode >= 400 ? "warn" : "http";
|
||||
logger[logLevel](formatResponseLog(req, res, responseTime));
|
||||
});
|
||||
|
||||
@@ -93,14 +101,22 @@ export const requestLogger = (req: TimedRequest, res: Response, next: NextFuncti
|
||||
* Error logging middleware
|
||||
* Logs errors that occur during request processing
|
||||
*/
|
||||
export const errorLogger = (err: Error, req: Request, res: Response, next: NextFunction): void => {
|
||||
logger.error(`Error processing ${req.method} ${req.originalUrl}: ${err.message}`, {
|
||||
export const errorLogger = (
|
||||
err: Error,
|
||||
req: Request,
|
||||
res: Response,
|
||||
next: NextFunction,
|
||||
): void => {
|
||||
logger.error(
|
||||
`Error processing ${req.method} ${req.originalUrl}: ${err.message}`,
|
||||
{
|
||||
error: err.stack,
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
body: req.body,
|
||||
query: req.query,
|
||||
ip: getClientIp(req)
|
||||
});
|
||||
ip: getClientIp(req),
|
||||
},
|
||||
);
|
||||
next(err);
|
||||
};
|
||||
@@ -1,6 +1,6 @@
|
||||
import { exec } from 'child_process';
|
||||
import { promisify } from 'util';
|
||||
import { EventEmitter } from 'events';
|
||||
import { exec } from "child_process";
|
||||
import { promisify } from "util";
|
||||
import { EventEmitter } from "events";
|
||||
|
||||
const execAsync = promisify(exec);
|
||||
|
||||
@@ -25,7 +25,7 @@ class MacOSIntegration extends EventEmitter {
|
||||
this.permissions = {
|
||||
notifications: false,
|
||||
automation: false,
|
||||
accessibility: false
|
||||
accessibility: false,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -38,51 +38,55 @@ class MacOSIntegration extends EventEmitter {
|
||||
try {
|
||||
// Check notification permissions
|
||||
const { stdout: notifPerms } = await execAsync(
|
||||
'osascript -e \'tell application "System Events" to get properties\''
|
||||
"osascript -e 'tell application \"System Events\" to get properties'",
|
||||
);
|
||||
this.permissions.notifications = notifPerms.includes(
|
||||
"notifications enabled:true",
|
||||
);
|
||||
this.permissions.notifications = notifPerms.includes('notifications enabled:true');
|
||||
|
||||
// Check automation permissions
|
||||
const { stdout: autoPerms } = await execAsync(
|
||||
'osascript -e \'tell application "System Events" to get UI elements enabled\''
|
||||
"osascript -e 'tell application \"System Events\" to get UI elements enabled'",
|
||||
);
|
||||
this.permissions.automation = autoPerms.includes('true');
|
||||
this.permissions.automation = autoPerms.includes("true");
|
||||
|
||||
// Check accessibility permissions
|
||||
const { stdout: accessPerms } = await execAsync(
|
||||
'osascript -e \'tell application "System Events" to get processes\''
|
||||
"osascript -e 'tell application \"System Events\" to get processes'",
|
||||
);
|
||||
this.permissions.accessibility = !accessPerms.includes('error');
|
||||
this.permissions.accessibility = !accessPerms.includes("error");
|
||||
|
||||
return this.permissions;
|
||||
} catch (error) {
|
||||
console.error('Error checking permissions:', error);
|
||||
console.error("Error checking permissions:", error);
|
||||
return this.permissions;
|
||||
}
|
||||
}
|
||||
|
||||
async sendNotification(notification: MacOSNotification): Promise<void> {
|
||||
if (!this.permissions.notifications) {
|
||||
throw new Error('Notification permission not granted');
|
||||
throw new Error("Notification permission not granted");
|
||||
}
|
||||
|
||||
const script = `
|
||||
display notification "${notification.message}"${notification.subtitle ? ` with subtitle "${notification.subtitle}"` : ''
|
||||
} with title "${notification.title}"${notification.sound ? ' sound name "default"' : ''
|
||||
display notification "${notification.message}"${
|
||||
notification.subtitle ? ` with subtitle "${notification.subtitle}"` : ""
|
||||
} with title "${notification.title}"${
|
||||
notification.sound ? ' sound name "default"' : ""
|
||||
}
|
||||
`;
|
||||
|
||||
try {
|
||||
await execAsync(`osascript -e '${script}'`);
|
||||
} catch (error) {
|
||||
console.error('Error sending notification:', error);
|
||||
console.error("Error sending notification:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async registerSystemEvents(): Promise<void> {
|
||||
if (!this.permissions.automation) {
|
||||
throw new Error('Automation permission not granted');
|
||||
throw new Error("Automation permission not granted");
|
||||
}
|
||||
|
||||
// Monitor system events
|
||||
@@ -112,23 +116,23 @@ class MacOSIntegration extends EventEmitter {
|
||||
|
||||
try {
|
||||
const { stdout } = await execAsync(`osascript -e '${script}'`);
|
||||
const events = stdout.split(',').map(e => e.trim());
|
||||
events.forEach(event => this.emit('system_event', event));
|
||||
const events = stdout.split(",").map((e) => e.trim());
|
||||
events.forEach((event) => this.emit("system_event", event));
|
||||
} catch (error) {
|
||||
console.error('Error monitoring system events:', error);
|
||||
console.error("Error monitoring system events:", error);
|
||||
}
|
||||
}
|
||||
|
||||
async executeAutomation(script: string): Promise<string> {
|
||||
if (!this.permissions.automation) {
|
||||
throw new Error('Automation permission not granted');
|
||||
throw new Error("Automation permission not granted");
|
||||
}
|
||||
|
||||
try {
|
||||
const { stdout } = await execAsync(`osascript -e '${script}'`);
|
||||
return stdout;
|
||||
} catch (error) {
|
||||
console.error('Error executing automation:', error);
|
||||
console.error("Error executing automation:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -138,36 +142,40 @@ class MacOSIntegration extends EventEmitter {
|
||||
|
||||
try {
|
||||
// Get macOS version
|
||||
const { stdout: version } = await execAsync('sw_vers -productVersion');
|
||||
const { stdout: version } = await execAsync("sw_vers -productVersion");
|
||||
info.os_version = version.trim();
|
||||
|
||||
// Get hardware info
|
||||
const { stdout: hardware } = await execAsync('system_profiler SPHardwareDataType');
|
||||
const { stdout: hardware } = await execAsync(
|
||||
"system_profiler SPHardwareDataType",
|
||||
);
|
||||
info.hardware = this.parseSystemProfile(hardware);
|
||||
|
||||
// Get power info
|
||||
const { stdout: power } = await execAsync('pmset -g batt');
|
||||
const { stdout: power } = await execAsync("pmset -g batt");
|
||||
info.power = this.parsePowerInfo(power);
|
||||
|
||||
// Get network info
|
||||
const { stdout: network } = await execAsync('networksetup -listallhardwareports');
|
||||
const { stdout: network } = await execAsync(
|
||||
"networksetup -listallhardwareports",
|
||||
);
|
||||
info.network = this.parseNetworkInfo(network);
|
||||
|
||||
return info;
|
||||
} catch (error) {
|
||||
console.error('Error getting system info:', error);
|
||||
console.error("Error getting system info:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private parseSystemProfile(output: string): Record<string, any> {
|
||||
const info: Record<string, any> = {};
|
||||
const lines = output.split('\n');
|
||||
const lines = output.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
const [key, value] = line.split(':').map(s => s.trim());
|
||||
const [key, value] = line.split(":").map((s) => s.trim());
|
||||
if (key && value) {
|
||||
info[key.toLowerCase().replace(/\s+/g, '_')] = value;
|
||||
info[key.toLowerCase().replace(/\s+/g, "_")] = value;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -176,12 +184,12 @@ class MacOSIntegration extends EventEmitter {
|
||||
|
||||
private parsePowerInfo(output: string): Record<string, any> {
|
||||
const info: Record<string, any> = {};
|
||||
const lines = output.split('\n');
|
||||
const lines = output.split("\n");
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.includes('Now drawing from')) {
|
||||
info.power_source = line.includes('Battery') ? 'battery' : 'ac_power';
|
||||
} else if (line.includes('%')) {
|
||||
if (line.includes("Now drawing from")) {
|
||||
info.power_source = line.includes("Battery") ? "battery" : "ac_power";
|
||||
} else if (line.includes("%")) {
|
||||
const matches = line.match(/(\d+)%/);
|
||||
if (matches) {
|
||||
info.battery_percentage = parseInt(matches[1]);
|
||||
@@ -194,17 +202,17 @@ class MacOSIntegration extends EventEmitter {
|
||||
|
||||
private parseNetworkInfo(output: string): Record<string, any> {
|
||||
const info: Record<string, any> = {};
|
||||
const lines = output.split('\n');
|
||||
const lines = output.split("\n");
|
||||
let currentInterface: string | null = null;
|
||||
|
||||
for (const line of lines) {
|
||||
if (line.includes('Hardware Port:')) {
|
||||
currentInterface = line.split(':')[1].trim();
|
||||
if (line.includes("Hardware Port:")) {
|
||||
currentInterface = line.split(":")[1].trim();
|
||||
info[currentInterface] = {};
|
||||
} else if (currentInterface && line.includes('Device:')) {
|
||||
info[currentInterface].device = line.split(':')[1].trim();
|
||||
} else if (currentInterface && line.includes('Ethernet Address:')) {
|
||||
info[currentInterface].mac = line.split(':')[1].trim();
|
||||
} else if (currentInterface && line.includes("Device:")) {
|
||||
info[currentInterface].device = line.split(":")[1].trim();
|
||||
} else if (currentInterface && line.includes("Ethernet Address:")) {
|
||||
info[currentInterface].mac = line.split(":")[1].trim();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
9
src/polyfills.js
Normal file
9
src/polyfills.js
Normal file
@@ -0,0 +1,9 @@
|
||||
// Add necessary polyfills for Node.js compatibility in Bun
|
||||
import { webcrypto } from 'node:crypto';
|
||||
|
||||
// Polyfill for crypto.subtle in Bun
|
||||
if (!globalThis.crypto?.subtle) {
|
||||
globalThis.crypto = webcrypto;
|
||||
}
|
||||
|
||||
// Add any other necessary polyfills here
|
||||
@@ -1,14 +1,14 @@
|
||||
import { Router } from 'express';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { Router } from "express";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// Health check endpoint
|
||||
router.get('/', (_req, res) => {
|
||||
router.get("/", (_req, res) => {
|
||||
res.json({
|
||||
status: 'ok',
|
||||
status: "ok",
|
||||
timestamp: new Date().toISOString(),
|
||||
version: APP_CONFIG.VERSION
|
||||
version: APP_CONFIG.VERSION,
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -8,11 +8,11 @@
|
||||
* @module routes
|
||||
*/
|
||||
|
||||
import { Router } from 'express';
|
||||
import { mcpRoutes } from './mcp.routes.js';
|
||||
import { sseRoutes } from './sse.routes.js';
|
||||
import { toolRoutes } from './tool.routes.js';
|
||||
import { healthRoutes } from './health.routes.js';
|
||||
import { Router } from "express";
|
||||
import { mcpRoutes } from "./mcp.routes.js";
|
||||
import { sseRoutes } from "./sse.routes.js";
|
||||
import { toolRoutes } from "./tool.routes.js";
|
||||
import { healthRoutes } from "./health.routes.js";
|
||||
|
||||
/**
|
||||
* Create main router instance
|
||||
@@ -27,10 +27,10 @@ const router = Router();
|
||||
* - /tools: Tool management endpoints
|
||||
* - /health: Health check endpoint
|
||||
*/
|
||||
router.use('/mcp', mcpRoutes);
|
||||
router.use('/sse', sseRoutes);
|
||||
router.use('/tools', toolRoutes);
|
||||
router.use('/health', healthRoutes);
|
||||
router.use("/mcp", mcpRoutes);
|
||||
router.use("/sse", sseRoutes);
|
||||
router.use("/tools", toolRoutes);
|
||||
router.use("/health", healthRoutes);
|
||||
|
||||
/**
|
||||
* Export the configured router
|
||||
|
||||
@@ -7,10 +7,10 @@
|
||||
* @module mcp-routes
|
||||
*/
|
||||
|
||||
import { Router } from 'express';
|
||||
import { MCP_SCHEMA } from '../mcp/schema.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { Tool } from '../types/index.js';
|
||||
import { Router } from "express";
|
||||
import { MCP_SCHEMA } from "../mcp/schema.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
import { Tool } from "../types/index.js";
|
||||
|
||||
/**
|
||||
* Create router instance for MCP routes
|
||||
@@ -28,7 +28,7 @@ const tools: Tool[] = [];
|
||||
* Returns the MCP schema without requiring authentication
|
||||
* This endpoint allows clients to discover available tools and their parameters
|
||||
*/
|
||||
router.get('/', (_req, res) => {
|
||||
router.get("/", (_req, res) => {
|
||||
res.json(MCP_SCHEMA);
|
||||
});
|
||||
|
||||
@@ -44,26 +44,26 @@ router.get('/', (_req, res) => {
|
||||
* @throws {404} If tool is not found
|
||||
* @throws {500} If execution fails
|
||||
*/
|
||||
router.post('/execute', async (req, res) => {
|
||||
router.post("/execute", async (req, res) => {
|
||||
try {
|
||||
// Get token from Authorization header
|
||||
const token = req.headers.authorization?.replace('Bearer ', '');
|
||||
const token = req.headers.authorization?.replace("Bearer ", "");
|
||||
|
||||
if (!token || token !== APP_CONFIG.HASS_TOKEN) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized - Invalid token",
|
||||
});
|
||||
}
|
||||
|
||||
const { tool: toolName, parameters } = req.body;
|
||||
|
||||
// Find the requested tool
|
||||
const tool = tools.find(t => t.name === toolName);
|
||||
const tool = tools.find((t) => t.name === toolName);
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: `Tool '${toolName}' not found`
|
||||
message: `Tool '${toolName}' not found`,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -73,7 +73,8 @@ router.post('/execute', async (req, res) => {
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,99 +1,115 @@
|
||||
import { Router } from 'express';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { sseManager } from '../sse/index.js';
|
||||
import { TokenManager } from '../security/index.js';
|
||||
import { Router } from "express";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { sseManager } from "../sse/index.js";
|
||||
import { TokenManager } from "../security/index.js";
|
||||
import { middleware } from "../middleware/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
// SSE endpoints
|
||||
router.get('/subscribe', (req, res) => {
|
||||
router.get("/subscribe_events", middleware.wsRateLimiter, (req, res) => {
|
||||
try {
|
||||
// Get token from query parameter
|
||||
const token = req.query.token?.toString();
|
||||
// Get token from query parameter and validate
|
||||
const token = req.query.token?.toString() || "";
|
||||
const clientIp = req.ip || req.socket.remoteAddress || "";
|
||||
const validationResult = TokenManager.validateToken(token, clientIp);
|
||||
|
||||
if (!token || !TokenManager.validateToken(token)) {
|
||||
if (!validationResult.valid) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized",
|
||||
error: validationResult.error,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
|
||||
// Set SSE headers
|
||||
// Set SSE headers with enhanced security
|
||||
res.writeHead(200, {
|
||||
'Content-Type': 'text/event-stream',
|
||||
'Cache-Control': 'no-cache',
|
||||
'Connection': 'keep-alive',
|
||||
'Access-Control-Allow-Origin': '*'
|
||||
"Content-Type": "text/event-stream",
|
||||
"Cache-Control": "no-cache, no-transform",
|
||||
Connection: "keep-alive",
|
||||
"X-Accel-Buffering": "no",
|
||||
"Access-Control-Allow-Origin": "*",
|
||||
"Access-Control-Allow-Credentials": "true",
|
||||
});
|
||||
|
||||
// Send initial connection message
|
||||
res.write(`data: ${JSON.stringify({
|
||||
type: 'connection',
|
||||
status: 'connected',
|
||||
timestamp: new Date().toISOString()
|
||||
})}\n\n`);
|
||||
res.write(
|
||||
`data: ${JSON.stringify({
|
||||
type: "connection",
|
||||
status: "connected",
|
||||
timestamp: new Date().toISOString(),
|
||||
})}\n\n`,
|
||||
);
|
||||
|
||||
const clientId = uuidv4();
|
||||
const client = {
|
||||
id: clientId,
|
||||
ip: clientIp,
|
||||
connectedAt: new Date(),
|
||||
send: (data: string) => {
|
||||
res.write(`data: ${data}\n\n`);
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// Add client to SSE manager
|
||||
// Add client to SSE manager with enhanced tracking
|
||||
const sseClient = sseManager.addClient(client, token);
|
||||
if (!sseClient || !sseClient.authenticated) {
|
||||
res.write(`data: ${JSON.stringify({
|
||||
type: 'error',
|
||||
message: sseClient ? 'Authentication failed' : 'Maximum client limit reached',
|
||||
timestamp: new Date().toISOString()
|
||||
})}\n\n`);
|
||||
const errorMessage = JSON.stringify({
|
||||
type: "error",
|
||||
message: sseClient
|
||||
? "Authentication failed"
|
||||
: "Maximum client limit reached",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
res.write(`data: ${errorMessage}\n\n`);
|
||||
return res.end();
|
||||
}
|
||||
|
||||
// Subscribe to events if specified
|
||||
const events = req.query.events?.toString().split(',').filter(Boolean);
|
||||
if (events?.length) {
|
||||
events.forEach(event => sseManager.subscribeToEvent(clientId, event));
|
||||
}
|
||||
|
||||
// Subscribe to entity if specified
|
||||
const entityId = req.query.entity_id?.toString();
|
||||
if (entityId) {
|
||||
sseManager.subscribeToEntity(clientId, entityId);
|
||||
}
|
||||
|
||||
// Subscribe to domain if specified
|
||||
const domain = req.query.domain?.toString();
|
||||
if (domain) {
|
||||
sseManager.subscribeToDomain(clientId, domain);
|
||||
}
|
||||
|
||||
// Handle client disconnect
|
||||
req.on('close', () => {
|
||||
req.on("close", () => {
|
||||
sseManager.removeClient(clientId);
|
||||
console.log(
|
||||
`Client ${clientId} disconnected at ${new Date().toISOString()}`,
|
||||
);
|
||||
});
|
||||
|
||||
// Handle errors
|
||||
req.on("error", (error) => {
|
||||
console.error(`SSE Error for client ${clientId}:`, error);
|
||||
const errorMessage = JSON.stringify({
|
||||
type: "error",
|
||||
message: "Connection error",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
res.write(`data: ${errorMessage}\n\n`);
|
||||
sseManager.removeClient(clientId);
|
||||
res.end();
|
||||
});
|
||||
} catch (error) {
|
||||
console.error("SSE Setup Error:", error);
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message: "Internal Server Error",
|
||||
error:
|
||||
error instanceof Error ? error.message : "An unexpected error occurred",
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Get SSE stats endpoint
|
||||
router.get('/stats', async (req, res) => {
|
||||
router.get("/stats", async (req, res) => {
|
||||
try {
|
||||
const stats = await sseManager.getStatistics();
|
||||
res.json(stats);
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
export { router as sseRoutes };
|
||||
export default router;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Router } from 'express';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { Tool } from '../types/index.js';
|
||||
import { Router } from "express";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
import { Tool } from "../types/index.js";
|
||||
|
||||
const router = Router();
|
||||
|
||||
@@ -8,23 +8,23 @@ const router = Router();
|
||||
const tools: Tool[] = [];
|
||||
|
||||
// List devices endpoint
|
||||
router.get('/devices', async (req, res) => {
|
||||
router.get("/devices", async (req, res) => {
|
||||
try {
|
||||
// Get token from Authorization header
|
||||
const token = req.headers.authorization?.replace('Bearer ', '');
|
||||
const token = req.headers.authorization?.replace("Bearer ", "");
|
||||
|
||||
if (!token || token !== APP_CONFIG.HASS_TOKEN) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized - Invalid token",
|
||||
});
|
||||
}
|
||||
|
||||
const tool = tools.find(t => t.name === 'list_devices');
|
||||
const tool = tools.find((t) => t.name === "list_devices");
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: 'Tool not found'
|
||||
message: "Tool not found",
|
||||
});
|
||||
}
|
||||
|
||||
@@ -33,41 +33,43 @@ router.get('/devices', async (req, res) => {
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// Control device endpoint
|
||||
router.post('/control', async (req, res) => {
|
||||
router.post("/control", async (req, res) => {
|
||||
try {
|
||||
// Get token from Authorization header
|
||||
const token = req.headers.authorization?.replace('Bearer ', '');
|
||||
const token = req.headers.authorization?.replace("Bearer ", "");
|
||||
|
||||
if (!token || token !== APP_CONFIG.HASS_TOKEN) {
|
||||
return res.status(401).json({
|
||||
success: false,
|
||||
message: 'Unauthorized - Invalid token'
|
||||
message: "Unauthorized - Invalid token",
|
||||
});
|
||||
}
|
||||
|
||||
const tool = tools.find(t => t.name === 'control');
|
||||
const tool = tools.find((t) => t.name === "control");
|
||||
if (!tool) {
|
||||
return res.status(404).json({
|
||||
success: false,
|
||||
message: 'Tool not found'
|
||||
message: "Tool not found",
|
||||
});
|
||||
}
|
||||
|
||||
const result = await tool.execute({
|
||||
...req.body,
|
||||
token
|
||||
token,
|
||||
});
|
||||
res.json(result);
|
||||
} catch (error) {
|
||||
res.status(500).json({
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { z } from "zod";
|
||||
|
||||
|
||||
export const DomainSchema = z.enum([
|
||||
"light",
|
||||
"climate",
|
||||
@@ -14,7 +13,7 @@ export const DomainSchema = z.enum([
|
||||
"vacuum",
|
||||
"scene",
|
||||
"script",
|
||||
"camera"
|
||||
"camera",
|
||||
]);
|
||||
|
||||
// Generic list request schema
|
||||
@@ -58,12 +57,10 @@ export const AlarmSchema = z.object({
|
||||
state_attributes: AlarmAttributesSchema,
|
||||
});
|
||||
|
||||
|
||||
export const ListAlarmsResponseSchema = z.object({
|
||||
alarms: z.array(AlarmSchema),
|
||||
});
|
||||
|
||||
|
||||
// Devices
|
||||
|
||||
export const DeviceSchema = z.object({
|
||||
@@ -87,12 +84,12 @@ export const DeviceSchema = z.object({
|
||||
modified_at: z.number(),
|
||||
identifiers: z.array(z.any()),
|
||||
labels: z.array(z.string()),
|
||||
serial_number: z.string().optional()
|
||||
serial_number: z.string().optional(),
|
||||
});
|
||||
|
||||
export const ListDevicesResponseSchema = z.object({
|
||||
_meta: z.object({}).optional(),
|
||||
devices: z.array(DeviceSchema)
|
||||
devices: z.array(DeviceSchema),
|
||||
});
|
||||
|
||||
// Media Player
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { JSONSchemaType } from 'ajv';
|
||||
import { Entity, StateChangedEvent } from '../types/hass.js';
|
||||
import { JSONSchemaType } from "ajv";
|
||||
import { Entity, StateChangedEvent } from "../types/hass.js";
|
||||
|
||||
// Define base types for automation components
|
||||
type TriggerType = {
|
||||
@@ -31,14 +31,22 @@ type ActionType = {
|
||||
type AutomationType = {
|
||||
alias: string;
|
||||
description?: string | null;
|
||||
mode?: ('single' | 'parallel' | 'queued' | 'restart') | null;
|
||||
mode?: ("single" | "parallel" | "queued" | "restart") | null;
|
||||
trigger: TriggerType[];
|
||||
condition?: ConditionType[] | null;
|
||||
action: ActionType[];
|
||||
};
|
||||
|
||||
type DeviceControlType = {
|
||||
domain: 'light' | 'switch' | 'climate' | 'cover' | 'fan' | 'scene' | 'script' | 'media_player';
|
||||
domain:
|
||||
| "light"
|
||||
| "switch"
|
||||
| "climate"
|
||||
| "cover"
|
||||
| "fan"
|
||||
| "scene"
|
||||
| "script"
|
||||
| "media_player";
|
||||
command: string;
|
||||
entity_id: string | string[];
|
||||
parameters?: Record<string, any> | null;
|
||||
@@ -75,194 +83,210 @@ export interface Config {
|
||||
|
||||
// Define base schemas
|
||||
const contextSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
id: { type: 'string' },
|
||||
parent_id: { type: 'string', nullable: true },
|
||||
user_id: { type: 'string', nullable: true }
|
||||
id: { type: "string" },
|
||||
parent_id: { type: "string", nullable: true },
|
||||
user_id: { type: "string", nullable: true },
|
||||
},
|
||||
required: ['id', 'parent_id', 'user_id'],
|
||||
additionalProperties: false
|
||||
required: ["id", "parent_id", "user_id"],
|
||||
additionalProperties: false,
|
||||
} as const;
|
||||
|
||||
// Entity schema
|
||||
export const entitySchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
entity_id: { type: 'string' },
|
||||
state: { type: 'string' },
|
||||
entity_id: { type: "string" },
|
||||
state: { type: "string" },
|
||||
attributes: {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
type: "object",
|
||||
additionalProperties: true,
|
||||
},
|
||||
last_changed: { type: 'string' },
|
||||
last_updated: { type: 'string' },
|
||||
context: contextSchema
|
||||
last_changed: { type: "string" },
|
||||
last_updated: { type: "string" },
|
||||
context: contextSchema,
|
||||
},
|
||||
required: ['entity_id', 'state', 'attributes', 'last_changed', 'last_updated', 'context'],
|
||||
additionalProperties: false
|
||||
required: [
|
||||
"entity_id",
|
||||
"state",
|
||||
"attributes",
|
||||
"last_changed",
|
||||
"last_updated",
|
||||
"context",
|
||||
],
|
||||
additionalProperties: false,
|
||||
} as const;
|
||||
|
||||
// Service schema
|
||||
export const serviceSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
name: { type: 'string' },
|
||||
description: { type: 'string' },
|
||||
name: { type: "string" },
|
||||
description: { type: "string" },
|
||||
target: {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
nullable: true,
|
||||
properties: {
|
||||
entity: { type: 'array', items: { type: 'string' }, nullable: true },
|
||||
device: { type: 'array', items: { type: 'string' }, nullable: true },
|
||||
area: { type: 'array', items: { type: 'string' }, nullable: true }
|
||||
entity: { type: "array", items: { type: "string" }, nullable: true },
|
||||
device: { type: "array", items: { type: "string" }, nullable: true },
|
||||
area: { type: "array", items: { type: "string" }, nullable: true },
|
||||
},
|
||||
required: [],
|
||||
additionalProperties: false
|
||||
additionalProperties: false,
|
||||
},
|
||||
fields: {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
}
|
||||
type: "object",
|
||||
additionalProperties: true,
|
||||
},
|
||||
required: ['name', 'description', 'fields'],
|
||||
additionalProperties: false
|
||||
},
|
||||
required: ["name", "description", "fields"],
|
||||
additionalProperties: false,
|
||||
} as const;
|
||||
|
||||
// Define the trigger schema without type assertion
|
||||
export const triggerSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
platform: { type: 'string' },
|
||||
event: { type: 'string', nullable: true },
|
||||
entity_id: { type: 'string', nullable: true },
|
||||
to: { type: 'string', nullable: true },
|
||||
from: { type: 'string', nullable: true },
|
||||
offset: { type: 'string', nullable: true }
|
||||
platform: { type: "string" },
|
||||
event: { type: "string", nullable: true },
|
||||
entity_id: { type: "string", nullable: true },
|
||||
to: { type: "string", nullable: true },
|
||||
from: { type: "string", nullable: true },
|
||||
offset: { type: "string", nullable: true },
|
||||
},
|
||||
required: ['platform'],
|
||||
additionalProperties: true
|
||||
required: ["platform"],
|
||||
additionalProperties: true,
|
||||
};
|
||||
|
||||
// Define the automation schema
|
||||
export const automationSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
alias: { type: 'string' },
|
||||
description: { type: 'string', nullable: true },
|
||||
alias: { type: "string" },
|
||||
description: { type: "string", nullable: true },
|
||||
mode: {
|
||||
type: 'string',
|
||||
enum: ['single', 'parallel', 'queued', 'restart'],
|
||||
nullable: true
|
||||
type: "string",
|
||||
enum: ["single", "parallel", "queued", "restart"],
|
||||
nullable: true,
|
||||
},
|
||||
trigger: {
|
||||
type: 'array',
|
||||
items: triggerSchema
|
||||
type: "array",
|
||||
items: triggerSchema,
|
||||
},
|
||||
condition: {
|
||||
type: 'array',
|
||||
type: "array",
|
||||
items: {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
type: "object",
|
||||
additionalProperties: true,
|
||||
},
|
||||
nullable: true
|
||||
nullable: true,
|
||||
},
|
||||
action: {
|
||||
type: 'array',
|
||||
type: "array",
|
||||
items: {
|
||||
type: 'object',
|
||||
additionalProperties: true
|
||||
}
|
||||
}
|
||||
type: "object",
|
||||
additionalProperties: true,
|
||||
},
|
||||
required: ['alias', 'trigger', 'action'],
|
||||
additionalProperties: false
|
||||
},
|
||||
},
|
||||
required: ["alias", "trigger", "action"],
|
||||
additionalProperties: false,
|
||||
};
|
||||
|
||||
export const deviceControlSchema: JSONSchemaType<DeviceControlType> = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
domain: {
|
||||
type: 'string',
|
||||
enum: ['light', 'switch', 'climate', 'cover', 'fan', 'scene', 'script', 'media_player']
|
||||
type: "string",
|
||||
enum: [
|
||||
"light",
|
||||
"switch",
|
||||
"climate",
|
||||
"cover",
|
||||
"fan",
|
||||
"scene",
|
||||
"script",
|
||||
"media_player",
|
||||
],
|
||||
},
|
||||
command: { type: 'string' },
|
||||
command: { type: "string" },
|
||||
entity_id: {
|
||||
anyOf: [
|
||||
{ type: 'string' },
|
||||
{ type: "string" },
|
||||
{
|
||||
type: 'array',
|
||||
items: { type: 'string' }
|
||||
}
|
||||
]
|
||||
type: "array",
|
||||
items: { type: "string" },
|
||||
},
|
||||
],
|
||||
},
|
||||
parameters: {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
nullable: true,
|
||||
additionalProperties: true
|
||||
}
|
||||
additionalProperties: true,
|
||||
},
|
||||
required: ['domain', 'command', 'entity_id'],
|
||||
additionalProperties: false
|
||||
},
|
||||
required: ["domain", "command", "entity_id"],
|
||||
additionalProperties: false,
|
||||
};
|
||||
|
||||
// State changed event schema
|
||||
export const stateChangedEventSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
event_type: { type: 'string', const: 'state_changed' },
|
||||
event_type: { type: "string", const: "state_changed" },
|
||||
data: {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
entity_id: { type: 'string' },
|
||||
entity_id: { type: "string" },
|
||||
new_state: { ...entitySchema, nullable: true },
|
||||
old_state: { ...entitySchema, nullable: true }
|
||||
old_state: { ...entitySchema, nullable: true },
|
||||
},
|
||||
required: ['entity_id', 'new_state', 'old_state'],
|
||||
additionalProperties: false
|
||||
required: ["entity_id", "new_state", "old_state"],
|
||||
additionalProperties: false,
|
||||
},
|
||||
origin: { type: 'string' },
|
||||
time_fired: { type: 'string' },
|
||||
context: contextSchema
|
||||
origin: { type: "string" },
|
||||
time_fired: { type: "string" },
|
||||
context: contextSchema,
|
||||
},
|
||||
required: ['event_type', 'data', 'origin', 'time_fired', 'context'],
|
||||
additionalProperties: false
|
||||
required: ["event_type", "data", "origin", "time_fired", "context"],
|
||||
additionalProperties: false,
|
||||
} as const;
|
||||
|
||||
// Config schema
|
||||
export const configSchema = {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
components: { type: 'array', items: { type: 'string' } },
|
||||
config_dir: { type: 'string' },
|
||||
elevation: { type: 'number' },
|
||||
latitude: { type: 'number' },
|
||||
longitude: { type: 'number' },
|
||||
location_name: { type: 'string' },
|
||||
time_zone: { type: 'string' },
|
||||
components: { type: "array", items: { type: "string" } },
|
||||
config_dir: { type: "string" },
|
||||
elevation: { type: "number" },
|
||||
latitude: { type: "number" },
|
||||
longitude: { type: "number" },
|
||||
location_name: { type: "string" },
|
||||
time_zone: { type: "string" },
|
||||
unit_system: {
|
||||
type: 'object',
|
||||
type: "object",
|
||||
properties: {
|
||||
length: { type: 'string' },
|
||||
mass: { type: 'string' },
|
||||
temperature: { type: 'string' },
|
||||
volume: { type: 'string' }
|
||||
length: { type: "string" },
|
||||
mass: { type: "string" },
|
||||
temperature: { type: "string" },
|
||||
volume: { type: "string" },
|
||||
},
|
||||
required: ['length', 'mass', 'temperature', 'volume'],
|
||||
additionalProperties: false
|
||||
required: ["length", "mass", "temperature", "volume"],
|
||||
additionalProperties: false,
|
||||
},
|
||||
version: { type: 'string' }
|
||||
version: { type: "string" },
|
||||
},
|
||||
required: [
|
||||
'components',
|
||||
'config_dir',
|
||||
'elevation',
|
||||
'latitude',
|
||||
'longitude',
|
||||
'location_name',
|
||||
'time_zone',
|
||||
'unit_system',
|
||||
'version'
|
||||
"components",
|
||||
"config_dir",
|
||||
"elevation",
|
||||
"latitude",
|
||||
"longitude",
|
||||
"location_name",
|
||||
"time_zone",
|
||||
"unit_system",
|
||||
"version",
|
||||
],
|
||||
additionalProperties: false
|
||||
additionalProperties: false,
|
||||
} as const;
|
||||
184
src/security/__tests__/security.test.ts
Normal file
184
src/security/__tests__/security.test.ts
Normal file
@@ -0,0 +1,184 @@
|
||||
import { describe, expect, it, beforeEach } from "bun:test";
|
||||
import { TokenManager } from "../index";
|
||||
import jwt from "jsonwebtoken";
|
||||
|
||||
const validSecret = "test_secret_that_is_at_least_32_chars_long";
|
||||
const testIp = "127.0.0.1";
|
||||
|
||||
// Mock the rate limit window for faster tests
|
||||
const MOCK_RATE_LIMIT_WINDOW = 100; // 100ms instead of 15 minutes
|
||||
|
||||
describe("Security Module", () => {
|
||||
beforeEach(() => {
|
||||
process.env.JWT_SECRET = validSecret;
|
||||
// Reset failed attempts map
|
||||
(TokenManager as any).failedAttempts = new Map();
|
||||
// Mock the security config
|
||||
(TokenManager as any).SECURITY_CONFIG = {
|
||||
...(TokenManager as any).SECURITY_CONFIG,
|
||||
LOCKOUT_DURATION: MOCK_RATE_LIMIT_WINDOW,
|
||||
MAX_FAILED_ATTEMPTS: 5,
|
||||
MAX_TOKEN_AGE: 30 * 24 * 60 * 60 * 1000 // 30 days
|
||||
};
|
||||
});
|
||||
|
||||
describe("TokenManager", () => {
|
||||
it("should encrypt and decrypt tokens", () => {
|
||||
const originalToken = "test-token";
|
||||
const encryptedToken = TokenManager.encryptToken(originalToken, validSecret);
|
||||
expect(encryptedToken).toBeDefined();
|
||||
expect(encryptedToken.includes(originalToken)).toBe(false);
|
||||
|
||||
const decryptedToken = TokenManager.decryptToken(encryptedToken, validSecret);
|
||||
expect(decryptedToken).toBeDefined();
|
||||
expect(decryptedToken).toBe(originalToken);
|
||||
});
|
||||
|
||||
it("should validate tokens correctly", () => {
|
||||
const payload = { userId: "123", role: "user" };
|
||||
const token = jwt.sign(payload, validSecret, { expiresIn: "1h" });
|
||||
const result = TokenManager.validateToken(token, testIp);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.error).toBeUndefined();
|
||||
|
||||
// Verify payload separately
|
||||
const decoded = jwt.verify(token, validSecret) as typeof payload;
|
||||
expect(decoded.userId).toBe(payload.userId);
|
||||
expect(decoded.role).toBe(payload.role);
|
||||
});
|
||||
|
||||
it("should handle empty tokens", () => {
|
||||
const result = TokenManager.validateToken("", testIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Invalid token format");
|
||||
});
|
||||
|
||||
it("should handle expired tokens", () => {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const payload = {
|
||||
userId: "123",
|
||||
role: "user",
|
||||
iat: now - 3600, // issued 1 hour ago
|
||||
exp: now - 1800 // expired 30 minutes ago
|
||||
};
|
||||
const token = jwt.sign(payload, validSecret);
|
||||
const result = TokenManager.validateToken(token, testIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Token has expired");
|
||||
});
|
||||
|
||||
it("should handle token tampering", () => {
|
||||
// Use a different IP for this test to avoid rate limiting
|
||||
const uniqueIp = "192.168.1.1";
|
||||
const payload = { userId: "123", role: "user" };
|
||||
const token = jwt.sign(payload, validSecret);
|
||||
const tamperedToken = token.slice(0, -5) + "xxxxx"; // Tamper with signature
|
||||
|
||||
const result = TokenManager.validateToken(tamperedToken, uniqueIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Invalid token signature");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Token Encryption", () => {
|
||||
it("should use different IVs for same token", () => {
|
||||
const token = "test-token";
|
||||
const encrypted1 = TokenManager.encryptToken(token, validSecret);
|
||||
const encrypted2 = TokenManager.encryptToken(token, validSecret);
|
||||
expect(encrypted1).toBeDefined();
|
||||
expect(encrypted2).toBeDefined();
|
||||
expect(encrypted1 === encrypted2).toBe(false);
|
||||
});
|
||||
|
||||
it("should handle large tokens", () => {
|
||||
const largeToken = "x".repeat(1024);
|
||||
const encrypted = TokenManager.encryptToken(largeToken, validSecret);
|
||||
const decrypted = TokenManager.decryptToken(encrypted, validSecret);
|
||||
expect(decrypted).toBe(largeToken);
|
||||
});
|
||||
|
||||
it("should fail gracefully with invalid encrypted data", () => {
|
||||
expect(() => TokenManager.decryptToken("invalid-encrypted-data", validSecret))
|
||||
.toThrow("Invalid encrypted token");
|
||||
});
|
||||
});
|
||||
|
||||
describe("Rate Limiting", () => {
|
||||
beforeEach(() => {
|
||||
// Reset failed attempts before each test
|
||||
(TokenManager as any).failedAttempts = new Map();
|
||||
});
|
||||
|
||||
it("should track failed attempts by IP", () => {
|
||||
const invalidToken = "x".repeat(64);
|
||||
const ip1 = "1.1.1.1";
|
||||
const ip2 = "2.2.2.2";
|
||||
|
||||
// Make a single failed attempt for each IP
|
||||
TokenManager.validateToken(invalidToken, ip1);
|
||||
TokenManager.validateToken(invalidToken, ip2);
|
||||
|
||||
const attempts = (TokenManager as any).failedAttempts;
|
||||
expect(attempts.has(ip1)).toBe(true);
|
||||
expect(attempts.has(ip2)).toBe(true);
|
||||
expect(attempts.get(ip1).count).toBe(1);
|
||||
expect(attempts.get(ip2).count).toBe(1);
|
||||
expect(attempts.get(ip1).lastAttempt).toBeGreaterThan(0);
|
||||
expect(attempts.get(ip2).lastAttempt).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should handle rate limiting for failed attempts", async () => {
|
||||
const invalidToken = "x".repeat(64);
|
||||
const uniqueIp = "10.0.0.1";
|
||||
|
||||
// Make multiple failed attempts
|
||||
for (let i = 0; i < 5; i++) {
|
||||
const result = TokenManager.validateToken(invalidToken, uniqueIp);
|
||||
expect(result.valid).toBe(false);
|
||||
if (i < 4) {
|
||||
expect(result.error).toBe("Invalid token signature");
|
||||
} else {
|
||||
expect(result.error).toBe("Too many failed attempts. Please try again later.");
|
||||
}
|
||||
}
|
||||
|
||||
// Next attempt should be rate limited
|
||||
const result = TokenManager.validateToken(invalidToken, uniqueIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Too many failed attempts. Please try again later.");
|
||||
|
||||
// Wait for rate limit window to expire
|
||||
await new Promise(resolve => setTimeout(resolve, MOCK_RATE_LIMIT_WINDOW + 50));
|
||||
|
||||
// After window expires, should get normal error again
|
||||
const finalResult = TokenManager.validateToken(invalidToken, uniqueIp);
|
||||
expect(finalResult.valid).toBe(false);
|
||||
expect(finalResult.error).toBe("Invalid token signature");
|
||||
});
|
||||
|
||||
it("should reset rate limits after window expires", async () => {
|
||||
const invalidToken = "x".repeat(64);
|
||||
const uniqueIp = "172.16.0.1";
|
||||
|
||||
// Make some failed attempts
|
||||
for (let i = 0; i < 3; i++) {
|
||||
const result = TokenManager.validateToken(invalidToken, uniqueIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Invalid token signature");
|
||||
}
|
||||
|
||||
// Wait for rate limit window to expire
|
||||
await new Promise(resolve => setTimeout(resolve, MOCK_RATE_LIMIT_WINDOW + 50));
|
||||
|
||||
// After window expires, should get normal error
|
||||
const result = TokenManager.validateToken(invalidToken, uniqueIp);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.error).toBe("Invalid token signature");
|
||||
|
||||
// Should have one new attempt recorded
|
||||
const attempts = (TokenManager as any).failedAttempts;
|
||||
expect(attempts.has(uniqueIp)).toBe(true);
|
||||
expect(attempts.get(uniqueIp).count).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,33 +1,64 @@
|
||||
import crypto from 'crypto';
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import rateLimit from 'express-rate-limit';
|
||||
import helmet from 'helmet';
|
||||
import { HelmetOptions } from 'helmet';
|
||||
import jwt from 'jsonwebtoken';
|
||||
import crypto from "crypto";
|
||||
import helmet from "helmet";
|
||||
import { HelmetOptions } from "helmet";
|
||||
import jwt from "jsonwebtoken";
|
||||
import { Elysia, type Context } from "elysia";
|
||||
|
||||
// Security configuration
|
||||
const RATE_LIMIT_WINDOW = 15 * 60 * 1000; // 15 minutes
|
||||
const RATE_LIMIT_MAX = 100; // requests per window
|
||||
const TOKEN_EXPIRY = 24 * 60 * 60 * 1000; // 24 hours
|
||||
|
||||
// Rate limiting middleware
|
||||
export const rateLimiter = rateLimit({
|
||||
windowMs: RATE_LIMIT_WINDOW,
|
||||
max: RATE_LIMIT_MAX,
|
||||
message: 'Too many requests from this IP, please try again later'
|
||||
// Rate limiting state
|
||||
const rateLimitStore = new Map<string, { count: number; resetTime: number }>();
|
||||
|
||||
interface RequestContext {
|
||||
request: Request;
|
||||
set: Context['set'];
|
||||
}
|
||||
|
||||
// Extracted rate limiting logic
|
||||
export function checkRateLimit(ip: string, maxRequests: number = RATE_LIMIT_MAX, windowMs: number = RATE_LIMIT_WINDOW) {
|
||||
const now = Date.now();
|
||||
|
||||
const record = rateLimitStore.get(ip) || {
|
||||
count: 0,
|
||||
resetTime: now + windowMs,
|
||||
};
|
||||
|
||||
if (now > record.resetTime) {
|
||||
record.count = 0;
|
||||
record.resetTime = now + windowMs;
|
||||
}
|
||||
|
||||
record.count++;
|
||||
rateLimitStore.set(ip, record);
|
||||
|
||||
if (record.count > maxRequests) {
|
||||
throw new Error("Too many requests from this IP, please try again later");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Rate limiting middleware for Elysia
|
||||
export const rateLimiter = new Elysia().derive(({ request }: RequestContext) => {
|
||||
const ip = request.headers.get("x-forwarded-for") || "unknown";
|
||||
checkRateLimit(ip);
|
||||
});
|
||||
|
||||
// Security configuration
|
||||
const helmetConfig: HelmetOptions = {
|
||||
// Extracted security headers logic
|
||||
export function applySecurityHeaders(request: Request, helmetConfig?: HelmetOptions) {
|
||||
const config: HelmetOptions = helmetConfig || {
|
||||
contentSecurityPolicy: {
|
||||
useDefaults: true,
|
||||
directives: {
|
||||
defaultSrc: ["'self'"],
|
||||
scriptSrc: ["'self'", "'unsafe-inline'"],
|
||||
styleSrc: ["'self'", "'unsafe-inline'"],
|
||||
imgSrc: ["'self'", 'data:', 'https:'],
|
||||
connectSrc: ["'self'", 'wss:', 'https:']
|
||||
}
|
||||
imgSrc: ["'self'", "data:", "https:"],
|
||||
connectSrc: ["'self'", "wss:", "https:"],
|
||||
},
|
||||
},
|
||||
dnsPrefetchControl: true,
|
||||
frameguard: true,
|
||||
@@ -36,27 +67,155 @@ const helmetConfig: HelmetOptions = {
|
||||
ieNoOpen: true,
|
||||
noSniff: true,
|
||||
referrerPolicy: {
|
||||
policy: ['no-referrer', 'strict-origin-when-cross-origin']
|
||||
}
|
||||
policy: ["no-referrer", "strict-origin-when-cross-origin"],
|
||||
},
|
||||
};
|
||||
|
||||
// Security headers middleware
|
||||
export const securityHeaders = helmet(helmetConfig);
|
||||
const headers = helmet(config);
|
||||
|
||||
const ALGORITHM = 'aes-256-gcm';
|
||||
// Apply helmet headers to the request
|
||||
Object.entries(headers).forEach(([key, value]) => {
|
||||
if (typeof value === 'string') {
|
||||
request.headers.set(key, value);
|
||||
}
|
||||
});
|
||||
|
||||
return headers;
|
||||
}
|
||||
|
||||
// Security headers middleware for Elysia
|
||||
export const securityHeaders = new Elysia().derive(({ request }: RequestContext) => {
|
||||
applySecurityHeaders(request);
|
||||
});
|
||||
|
||||
// Extracted request validation logic
|
||||
export function validateRequestHeaders(request: Request, requiredContentType = 'application/json') {
|
||||
// Validate content type for POST/PUT/PATCH requests
|
||||
if (["POST", "PUT", "PATCH"].includes(request.method)) {
|
||||
const contentType = request.headers.get("content-type");
|
||||
if (!contentType?.includes(requiredContentType)) {
|
||||
throw new Error(`Content-Type must be ${requiredContentType}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Validate request size
|
||||
const contentLength = request.headers.get("content-length");
|
||||
if (contentLength && parseInt(contentLength) > 1024 * 1024) {
|
||||
throw new Error("Request body too large");
|
||||
}
|
||||
|
||||
// Validate authorization header if required
|
||||
const authHeader = request.headers.get("authorization");
|
||||
if (authHeader) {
|
||||
const [type, token] = authHeader.split(" ");
|
||||
if (type !== "Bearer" || !token) {
|
||||
throw new Error("Invalid authorization header");
|
||||
}
|
||||
|
||||
const ip = request.headers.get("x-forwarded-for");
|
||||
const validation = TokenManager.validateToken(token, ip || undefined);
|
||||
if (!validation.valid) {
|
||||
throw new Error(validation.error || "Invalid token");
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// Request validation middleware for Elysia
|
||||
export const validateRequest = new Elysia().derive(({ request }: RequestContext) => {
|
||||
validateRequestHeaders(request);
|
||||
});
|
||||
|
||||
// Extracted input sanitization logic
|
||||
export function sanitizeValue(value: unknown): unknown {
|
||||
if (typeof value === "string") {
|
||||
// Basic XSS protection
|
||||
return value
|
||||
.replace(/</g, "<")
|
||||
.replace(/>/g, ">")
|
||||
.replace(/"/g, """)
|
||||
.replace(/'/g, "'")
|
||||
.replace(/\//g, "/");
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
return value.map(sanitizeValue);
|
||||
}
|
||||
|
||||
if (typeof value === "object" && value !== null) {
|
||||
return Object.fromEntries(
|
||||
Object.entries(value).map(([k, v]) => [k, sanitizeValue(v)])
|
||||
);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
// Input sanitization middleware for Elysia
|
||||
export const sanitizeInput = new Elysia().derive(async ({ request }: RequestContext) => {
|
||||
if (["POST", "PUT", "PATCH"].includes(request.method)) {
|
||||
const body = await request.json();
|
||||
request.json = () => Promise.resolve(sanitizeValue(body));
|
||||
}
|
||||
});
|
||||
|
||||
// Extracted error handling logic
|
||||
export function handleError(error: Error, env: string = process.env.NODE_ENV || 'production') {
|
||||
console.error("Error:", error);
|
||||
|
||||
const baseResponse = {
|
||||
error: true,
|
||||
message: "Internal server error",
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
if (env === 'development') {
|
||||
return {
|
||||
...baseResponse,
|
||||
error: error.message,
|
||||
stack: error.stack,
|
||||
};
|
||||
}
|
||||
|
||||
return baseResponse;
|
||||
}
|
||||
|
||||
// Error handling middleware for Elysia
|
||||
export const errorHandler = new Elysia().onError(({ error, set }: { error: Error; set: Context['set'] }) => {
|
||||
set.status = error instanceof jwt.JsonWebTokenError ? 401 : 500;
|
||||
return handleError(error);
|
||||
});
|
||||
|
||||
const ALGORITHM = "aes-256-gcm";
|
||||
const IV_LENGTH = 16;
|
||||
const AUTH_TAG_LENGTH = 16;
|
||||
|
||||
// Security configuration
|
||||
const SECURITY_CONFIG = {
|
||||
TOKEN_EXPIRY: 24 * 60 * 60 * 1000, // 24 hours
|
||||
MAX_TOKEN_AGE: 30 * 24 * 60 * 60 * 1000, // 30 days
|
||||
MIN_TOKEN_LENGTH: 32,
|
||||
MAX_FAILED_ATTEMPTS: 5,
|
||||
LOCKOUT_DURATION: 15 * 60 * 1000, // 15 minutes
|
||||
};
|
||||
|
||||
// Track failed authentication attempts
|
||||
const failedAttempts = new Map<
|
||||
string,
|
||||
{ count: number; lastAttempt: number }
|
||||
>();
|
||||
|
||||
export class TokenManager {
|
||||
/**
|
||||
* Encrypts a token using AES-256-GCM
|
||||
*/
|
||||
static encryptToken(token: string, key: string): string {
|
||||
if (!token || typeof token !== 'string') {
|
||||
throw new Error('Invalid token');
|
||||
if (!token || typeof token !== "string") {
|
||||
throw new Error("Invalid token");
|
||||
}
|
||||
if (!key || typeof key !== 'string' || key.length < 32) {
|
||||
throw new Error('Invalid encryption key');
|
||||
if (!key || typeof key !== "string" || key.length < 32) {
|
||||
throw new Error("Invalid encryption key");
|
||||
}
|
||||
|
||||
try {
|
||||
@@ -64,15 +223,15 @@ export class TokenManager {
|
||||
const cipher = crypto.createCipheriv(ALGORITHM, key.slice(0, 32), iv);
|
||||
|
||||
const encrypted = Buffer.concat([
|
||||
cipher.update(token, 'utf8'),
|
||||
cipher.final()
|
||||
cipher.update(token, "utf8"),
|
||||
cipher.final(),
|
||||
]);
|
||||
const tag = cipher.getAuthTag();
|
||||
|
||||
// Format: algorithm:iv:tag:encrypted
|
||||
return `${ALGORITHM}:${iv.toString('base64')}:${tag.toString('base64')}:${encrypted.toString('base64')}`;
|
||||
return `${ALGORITHM}:${iv.toString("base64")}:${tag.toString("base64")}:${encrypted.toString("base64")}`;
|
||||
} catch (error) {
|
||||
throw new Error('Failed to encrypt token');
|
||||
throw new Error("Failed to encrypt token");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -80,141 +239,183 @@ export class TokenManager {
|
||||
* Decrypts a token using AES-256-GCM
|
||||
*/
|
||||
static decryptToken(encryptedToken: string, key: string): string {
|
||||
if (!encryptedToken || typeof encryptedToken !== 'string') {
|
||||
throw new Error('Invalid encrypted token');
|
||||
if (!encryptedToken || typeof encryptedToken !== "string") {
|
||||
throw new Error("Invalid encrypted token");
|
||||
}
|
||||
if (!key || typeof key !== 'string' || key.length < 32) {
|
||||
throw new Error('Invalid encryption key');
|
||||
if (!key || typeof key !== "string" || key.length < 32) {
|
||||
throw new Error("Invalid encryption key");
|
||||
}
|
||||
|
||||
try {
|
||||
const [algorithm, ivBase64, tagBase64, encryptedBase64] = encryptedToken.split(':');
|
||||
const [algorithm, ivBase64, tagBase64, encryptedBase64] =
|
||||
encryptedToken.split(":");
|
||||
|
||||
if (algorithm !== ALGORITHM || !ivBase64 || !tagBase64 || !encryptedBase64) {
|
||||
throw new Error('Invalid encrypted token format');
|
||||
if (
|
||||
algorithm !== ALGORITHM ||
|
||||
!ivBase64 ||
|
||||
!tagBase64 ||
|
||||
!encryptedBase64
|
||||
) {
|
||||
throw new Error("Invalid encrypted token format");
|
||||
}
|
||||
|
||||
const iv = Buffer.from(ivBase64, 'base64');
|
||||
const tag = Buffer.from(tagBase64, 'base64');
|
||||
const encrypted = Buffer.from(encryptedBase64, 'base64');
|
||||
const iv = Buffer.from(ivBase64, "base64");
|
||||
const tag = Buffer.from(tagBase64, "base64");
|
||||
const encrypted = Buffer.from(encryptedBase64, "base64");
|
||||
|
||||
const decipher = crypto.createDecipheriv(ALGORITHM, key.slice(0, 32), iv);
|
||||
decipher.setAuthTag(tag);
|
||||
|
||||
return Buffer.concat([
|
||||
decipher.update(encrypted),
|
||||
decipher.final()
|
||||
]).toString('utf8');
|
||||
decipher.final(),
|
||||
]).toString("utf8");
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.message === 'Invalid encrypted token format') {
|
||||
if (
|
||||
error instanceof Error &&
|
||||
error.message === "Invalid encrypted token format"
|
||||
) {
|
||||
throw error;
|
||||
}
|
||||
throw new Error('Invalid encrypted token');
|
||||
throw new Error("Invalid encrypted token");
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validates a JWT token
|
||||
* Validates a JWT token with enhanced security checks
|
||||
*/
|
||||
static validateToken(token: string): boolean {
|
||||
if (!token || typeof token !== 'string') {
|
||||
return false;
|
||||
static validateToken(
|
||||
token: string | undefined | null,
|
||||
ip?: string,
|
||||
): { valid: boolean; error?: string } {
|
||||
// Check basic token format
|
||||
if (!token || typeof token !== "string") {
|
||||
return { valid: false, error: "Invalid token format" };
|
||||
}
|
||||
|
||||
// Check for token length
|
||||
if (token.length < SECURITY_CONFIG.MIN_TOKEN_LENGTH) {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
return { valid: false, error: "Token length below minimum requirement" };
|
||||
}
|
||||
|
||||
// Check for rate limiting
|
||||
if (ip && this.isRateLimited(ip)) {
|
||||
return {
|
||||
valid: false,
|
||||
error: "Too many failed attempts. Please try again later.",
|
||||
};
|
||||
}
|
||||
|
||||
// Get JWT secret
|
||||
const secret = process.env.JWT_SECRET;
|
||||
if (!secret) {
|
||||
return { valid: false, error: "JWT secret not configured" };
|
||||
}
|
||||
|
||||
try {
|
||||
const decoded = jwt.decode(token);
|
||||
if (!decoded || typeof decoded !== 'object') {
|
||||
return false;
|
||||
// Verify token signature and decode
|
||||
const decoded = jwt.verify(token, secret, {
|
||||
algorithms: ["HS256"],
|
||||
clockTolerance: 0, // No clock skew tolerance
|
||||
ignoreExpiration: false, // Always check expiration
|
||||
}) as jwt.JwtPayload;
|
||||
|
||||
// Verify token structure
|
||||
if (!decoded || typeof decoded !== "object") {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
return { valid: false, error: "Invalid token structure" };
|
||||
}
|
||||
|
||||
// Check for expiration
|
||||
if (!decoded.exp) {
|
||||
return false;
|
||||
// Check required claims
|
||||
if (!decoded.exp || !decoded.iat) {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
return { valid: false, error: "Token missing required claims" };
|
||||
}
|
||||
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
|
||||
// Check expiration
|
||||
if (decoded.exp <= now) {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
return { valid: false, error: "Token has expired" };
|
||||
}
|
||||
|
||||
// Check token age
|
||||
const tokenAge = (now - decoded.iat) * 1000;
|
||||
if (tokenAge > SECURITY_CONFIG.MAX_TOKEN_AGE) {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
return { valid: false, error: "Token exceeds maximum age limit" };
|
||||
}
|
||||
|
||||
// Reset failed attempts on successful validation
|
||||
if (ip) {
|
||||
failedAttempts.delete(ip);
|
||||
}
|
||||
|
||||
return { valid: true };
|
||||
} catch (error) {
|
||||
if (ip) this.recordFailedAttempt(ip);
|
||||
if (error instanceof jwt.TokenExpiredError) {
|
||||
return { valid: false, error: "Token has expired" };
|
||||
}
|
||||
if (error instanceof jwt.JsonWebTokenError) {
|
||||
return { valid: false, error: "Invalid token signature" };
|
||||
}
|
||||
return { valid: false, error: "Token validation failed" };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Records a failed authentication attempt for rate limiting
|
||||
*/
|
||||
private static recordFailedAttempt(ip?: string): void {
|
||||
if (!ip) return;
|
||||
|
||||
const attempt = failedAttempts.get(ip) || {
|
||||
count: 0,
|
||||
lastAttempt: Date.now(),
|
||||
};
|
||||
attempt.count++;
|
||||
attempt.lastAttempt = Date.now();
|
||||
failedAttempts.set(ip, attempt);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an IP is rate limited due to too many failed attempts
|
||||
*/
|
||||
private static isRateLimited(ip: string): boolean {
|
||||
const attempt = failedAttempts.get(ip);
|
||||
if (!attempt) return false;
|
||||
|
||||
// Reset if lockout duration has passed
|
||||
if (Date.now() - attempt.lastAttempt >= SECURITY_CONFIG.LOCKOUT_DURATION) {
|
||||
failedAttempts.delete(ip);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify signature using the secret from environment variable
|
||||
return attempt.count >= SECURITY_CONFIG.MAX_FAILED_ATTEMPTS;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generates a new JWT token
|
||||
*/
|
||||
static generateToken(payload: Record<string, any>): string {
|
||||
const secret = process.env.JWT_SECRET;
|
||||
if (!secret) {
|
||||
return false;
|
||||
throw new Error("JWT secret not configured");
|
||||
}
|
||||
|
||||
try {
|
||||
jwt.verify(token, secret);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
// Add required claims
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const tokenPayload = {
|
||||
...payload,
|
||||
iat: now,
|
||||
exp: now + Math.floor(TOKEN_EXPIRY / 1000),
|
||||
};
|
||||
|
||||
// Request validation middleware
|
||||
export function validateRequest(req: Request, res: Response, next: NextFunction) {
|
||||
// Skip validation for health and MCP schema endpoints
|
||||
if (req.path === '/health' || req.path === '/mcp') {
|
||||
return next();
|
||||
}
|
||||
|
||||
// Validate content type
|
||||
if (req.method !== 'GET' && !req.is('application/json')) {
|
||||
return res.status(415).json({
|
||||
error: 'Unsupported Media Type - Content-Type must be application/json'
|
||||
return jwt.sign(tokenPayload, secret, {
|
||||
algorithm: "HS256",
|
||||
});
|
||||
}
|
||||
|
||||
// Validate token
|
||||
const token = req.headers.authorization?.replace('Bearer ', '');
|
||||
if (!token || !TokenManager.validateToken(token)) {
|
||||
return res.status(401).json({
|
||||
error: 'Invalid or expired token'
|
||||
});
|
||||
}
|
||||
|
||||
// Validate request body
|
||||
if (req.method !== 'GET' && (!req.body || typeof req.body !== 'object')) {
|
||||
return res.status(400).json({
|
||||
error: 'Invalid request body'
|
||||
});
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
// Input sanitization middleware
|
||||
export function sanitizeInput(req: Request, res: Response, next: NextFunction) {
|
||||
if (req.body && typeof req.body === 'object') {
|
||||
const sanitized = JSON.parse(
|
||||
JSON.stringify(req.body).replace(/[<>]/g, '')
|
||||
);
|
||||
req.body = sanitized;
|
||||
}
|
||||
next();
|
||||
}
|
||||
|
||||
// Error handling middleware
|
||||
export function errorHandler(err: Error, req: Request, res: Response, next: NextFunction) {
|
||||
console.error(err.stack);
|
||||
res.status(500).json({
|
||||
error: 'Internal Server Error',
|
||||
message: process.env.NODE_ENV === 'development' ? err.message : undefined
|
||||
});
|
||||
}
|
||||
|
||||
// Export security middleware chain
|
||||
export const securityMiddleware = [
|
||||
helmet(helmetConfig),
|
||||
rateLimit({
|
||||
windowMs: 15 * 60 * 1000,
|
||||
max: 100
|
||||
}),
|
||||
validateRequest,
|
||||
sanitizeInput,
|
||||
errorHandler
|
||||
];
|
||||
@@ -1,9 +1,9 @@
|
||||
import { Request, Response, NextFunction } from 'express';
|
||||
import { validateRequest, sanitizeInput } from '../../src/security/middleware';
|
||||
import { Request, Response, NextFunction } from "express";
|
||||
import { validateRequest, sanitizeInput } from "../../src/security/middleware";
|
||||
|
||||
type MockRequest = {
|
||||
headers: {
|
||||
'content-type'?: string;
|
||||
"content-type"?: string;
|
||||
authorization?: string;
|
||||
};
|
||||
body?: any;
|
||||
@@ -16,7 +16,7 @@ type MockResponse = {
|
||||
setHeader: jest.MockInstance<MockResponse, [name: string, value: string]>;
|
||||
};
|
||||
|
||||
describe('Security Middleware', () => {
|
||||
describe("Security Middleware", () => {
|
||||
let mockRequest: MockRequest;
|
||||
let mockResponse: MockResponse;
|
||||
let nextFunction: jest.Mock;
|
||||
@@ -25,83 +25,117 @@ describe('Security Middleware', () => {
|
||||
mockRequest = {
|
||||
headers: {},
|
||||
body: {},
|
||||
is: jest.fn<string | false | null, [string | string[]]>().mockReturnValue('json')
|
||||
is: jest
|
||||
.fn<string | false | null, [string | string[]]>()
|
||||
.mockReturnValue("json"),
|
||||
};
|
||||
|
||||
mockResponse = {
|
||||
status: jest.fn<MockResponse, [number]>().mockReturnThis(),
|
||||
json: jest.fn<MockResponse, [any]>().mockReturnThis(),
|
||||
setHeader: jest.fn<MockResponse, [string, string]>().mockReturnThis()
|
||||
setHeader: jest.fn<MockResponse, [string, string]>().mockReturnThis(),
|
||||
};
|
||||
|
||||
nextFunction = jest.fn();
|
||||
});
|
||||
|
||||
describe('validateRequest', () => {
|
||||
it('should pass valid requests', () => {
|
||||
mockRequest.headers.authorization = 'Bearer valid-token';
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
describe("validateRequest", () => {
|
||||
it("should pass valid requests", () => {
|
||||
mockRequest.headers.authorization = "Bearer valid-token";
|
||||
validateRequest(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should reject requests without authorization header', () => {
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
it("should reject requests without authorization header", () => {
|
||||
validateRequest(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(expect.objectContaining({
|
||||
error: expect.stringContaining('authorization')
|
||||
}));
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
error: expect.stringContaining("authorization"),
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should reject requests with invalid authorization format', () => {
|
||||
mockRequest.headers.authorization = 'invalid-format';
|
||||
validateRequest(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
it("should reject requests with invalid authorization format", () => {
|
||||
mockRequest.headers.authorization = "invalid-format";
|
||||
validateRequest(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockResponse.status).toHaveBeenCalledWith(401);
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(expect.objectContaining({
|
||||
error: expect.stringContaining('Bearer')
|
||||
}));
|
||||
expect(mockResponse.json).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
error: expect.stringContaining("Bearer"),
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('sanitizeInput', () => {
|
||||
it('should pass requests without body', () => {
|
||||
describe("sanitizeInput", () => {
|
||||
it("should pass requests without body", () => {
|
||||
delete mockRequest.body;
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
sanitizeInput(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should sanitize HTML in request body', () => {
|
||||
it("should sanitize HTML in request body", () => {
|
||||
mockRequest.body = {
|
||||
text: '<script>alert("xss")</script>Hello',
|
||||
nested: {
|
||||
html: '<img src="x" onerror="alert(1)">World'
|
||||
}
|
||||
html: '<img src="x" onerror="alert(1)">World',
|
||||
},
|
||||
};
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockRequest.body.text).toBe('Hello');
|
||||
expect(mockRequest.body.nested.html).toBe('World');
|
||||
sanitizeInput(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body.text).toBe("Hello");
|
||||
expect(mockRequest.body.nested.html).toBe("World");
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle non-object bodies', () => {
|
||||
mockRequest.body = '<p>text</p>';
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
expect(mockRequest.body).toBe('text');
|
||||
it("should handle non-object bodies", () => {
|
||||
mockRequest.body = "<p>text</p>";
|
||||
sanitizeInput(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body).toBe("text");
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should preserve non-string values', () => {
|
||||
it("should preserve non-string values", () => {
|
||||
mockRequest.body = {
|
||||
number: 42,
|
||||
boolean: true,
|
||||
null: null,
|
||||
array: [1, 2, 3]
|
||||
array: [1, 2, 3],
|
||||
};
|
||||
sanitizeInput(mockRequest as unknown as Request, mockResponse as unknown as Response, nextFunction);
|
||||
sanitizeInput(
|
||||
mockRequest as unknown as Request,
|
||||
mockResponse as unknown as Response,
|
||||
nextFunction,
|
||||
);
|
||||
expect(mockRequest.body).toEqual({
|
||||
number: 42,
|
||||
boolean: true,
|
||||
null: null,
|
||||
array: [1, 2, 3]
|
||||
array: [1, 2, 3],
|
||||
});
|
||||
expect(nextFunction).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
0
src/speech/__tests__/fixtures/test.wav
Normal file
0
src/speech/__tests__/fixtures/test.wav
Normal file
116
src/speech/__tests__/speechToText.test.ts
Normal file
116
src/speech/__tests__/speechToText.test.ts
Normal file
@@ -0,0 +1,116 @@
|
||||
import { SpeechToText, WakeWordEvent, TranscriptionError } from '../speechToText';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
describe('SpeechToText', () => {
|
||||
let speechToText: SpeechToText;
|
||||
const testAudioDir = path.join(__dirname, 'test_audio');
|
||||
|
||||
beforeEach(() => {
|
||||
speechToText = new SpeechToText('fast-whisper');
|
||||
// Create test audio directory if it doesn't exist
|
||||
if (!fs.existsSync(testAudioDir)) {
|
||||
fs.mkdirSync(testAudioDir, { recursive: true });
|
||||
}
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
speechToText.stopWakeWordDetection();
|
||||
// Clean up test files
|
||||
if (fs.existsSync(testAudioDir)) {
|
||||
fs.rmSync(testAudioDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
describe('checkHealth', () => {
|
||||
it('should handle Docker not being available', async () => {
|
||||
const isHealthy = await speechToText.checkHealth();
|
||||
expect(isHealthy).toBeDefined();
|
||||
expect(isHealthy).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe('wake word detection', () => {
|
||||
it('should detect new audio files and emit wake word events', (done) => {
|
||||
const testFile = path.join(testAudioDir, 'wake_word_test_123456.wav');
|
||||
const testMetadata = `${testFile}.json`;
|
||||
|
||||
speechToText.startWakeWordDetection(testAudioDir);
|
||||
|
||||
speechToText.on('wake_word', (event: WakeWordEvent) => {
|
||||
expect(event).toBeDefined();
|
||||
expect(event.audioFile).toBe(testFile);
|
||||
expect(event.metadataFile).toBe(testMetadata);
|
||||
expect(event.timestamp).toBe('123456');
|
||||
done();
|
||||
});
|
||||
|
||||
// Create a test audio file to trigger the event
|
||||
fs.writeFileSync(testFile, 'test audio content');
|
||||
}, 1000);
|
||||
|
||||
it('should handle transcription errors when Docker is not available', (done) => {
|
||||
const testFile = path.join(testAudioDir, 'wake_word_test_123456.wav');
|
||||
|
||||
let errorEmitted = false;
|
||||
let wakeWordEmitted = false;
|
||||
|
||||
const checkDone = () => {
|
||||
if (errorEmitted && wakeWordEmitted) {
|
||||
done();
|
||||
}
|
||||
};
|
||||
|
||||
speechToText.on('error', (error) => {
|
||||
expect(error).toBeDefined();
|
||||
expect(error).toBeInstanceOf(TranscriptionError);
|
||||
expect(error.message).toContain('Failed to start Docker process');
|
||||
errorEmitted = true;
|
||||
checkDone();
|
||||
});
|
||||
|
||||
speechToText.on('wake_word', () => {
|
||||
wakeWordEmitted = true;
|
||||
checkDone();
|
||||
});
|
||||
|
||||
speechToText.startWakeWordDetection(testAudioDir);
|
||||
|
||||
// Create a test audio file to trigger the event
|
||||
fs.writeFileSync(testFile, 'test audio content');
|
||||
}, 1000);
|
||||
});
|
||||
|
||||
describe('transcribeAudio', () => {
|
||||
it('should handle Docker not being available for transcription', async () => {
|
||||
await expect(
|
||||
speechToText.transcribeAudio('/audio/test.wav')
|
||||
).rejects.toThrow(TranscriptionError);
|
||||
});
|
||||
|
||||
it('should emit progress events on error', (done) => {
|
||||
let progressEmitted = false;
|
||||
let errorThrown = false;
|
||||
|
||||
const checkDone = () => {
|
||||
if (progressEmitted && errorThrown) {
|
||||
done();
|
||||
}
|
||||
};
|
||||
|
||||
speechToText.on('progress', (event: { type: string; data: string }) => {
|
||||
expect(event.type).toBe('stderr');
|
||||
expect(event.data).toBe('Failed to start Docker process');
|
||||
progressEmitted = true;
|
||||
checkDone();
|
||||
});
|
||||
|
||||
speechToText.transcribeAudio('/audio/test.wav')
|
||||
.catch((error) => {
|
||||
expect(error).toBeInstanceOf(TranscriptionError);
|
||||
errorThrown = true;
|
||||
checkDone();
|
||||
});
|
||||
}, 1000);
|
||||
});
|
||||
});
|
||||
110
src/speech/index.ts
Normal file
110
src/speech/index.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
import { logger } from "../utils/logger.js";
|
||||
import type { IWakeWordDetector, ISpeechToText } from "./types.js";
|
||||
|
||||
class SpeechService {
|
||||
private static instance: SpeechService | null = null;
|
||||
private isInitialized: boolean = false;
|
||||
private wakeWordDetector: IWakeWordDetector | null = null;
|
||||
private speechToText: ISpeechToText | null = null;
|
||||
|
||||
private constructor() { }
|
||||
|
||||
public static getInstance(): SpeechService {
|
||||
if (!SpeechService.instance) {
|
||||
SpeechService.instance = new SpeechService();
|
||||
}
|
||||
return SpeechService.instance;
|
||||
}
|
||||
|
||||
public async initialize(): Promise<void> {
|
||||
if (this.isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!APP_CONFIG.SPEECH.ENABLED) {
|
||||
logger.info("Speech features are disabled. Skipping initialization.");
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Initialize components based on configuration
|
||||
if (APP_CONFIG.SPEECH.WAKE_WORD_ENABLED) {
|
||||
logger.info("Initializing wake word detection...");
|
||||
// Dynamic import to avoid loading the module if not needed
|
||||
const { WakeWordDetector } = await import("./wakeWordDetector.js");
|
||||
this.wakeWordDetector = new WakeWordDetector() as IWakeWordDetector;
|
||||
await this.wakeWordDetector.initialize();
|
||||
}
|
||||
|
||||
if (APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED) {
|
||||
logger.info("Initializing speech-to-text...");
|
||||
// Dynamic import to avoid loading the module if not needed
|
||||
const { SpeechToText } = await import("./speechToText.js");
|
||||
this.speechToText = new SpeechToText({
|
||||
modelPath: APP_CONFIG.SPEECH.WHISPER_MODEL_PATH,
|
||||
modelType: APP_CONFIG.SPEECH.WHISPER_MODEL_TYPE,
|
||||
}) as ISpeechToText;
|
||||
await this.speechToText.initialize();
|
||||
}
|
||||
|
||||
this.isInitialized = true;
|
||||
logger.info("Speech service initialized successfully");
|
||||
} catch (error) {
|
||||
logger.error("Failed to initialize speech service:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public async shutdown(): Promise<void> {
|
||||
if (!this.isInitialized) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (this.wakeWordDetector) {
|
||||
await this.wakeWordDetector.shutdown();
|
||||
this.wakeWordDetector = null;
|
||||
}
|
||||
|
||||
if (this.speechToText) {
|
||||
await this.speechToText.shutdown();
|
||||
this.speechToText = null;
|
||||
}
|
||||
|
||||
this.isInitialized = false;
|
||||
logger.info("Speech service shut down successfully");
|
||||
} catch (error) {
|
||||
logger.error("Error during speech service shutdown:", error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public isEnabled(): boolean {
|
||||
return APP_CONFIG.SPEECH.ENABLED;
|
||||
}
|
||||
|
||||
public isWakeWordEnabled(): boolean {
|
||||
return APP_CONFIG.SPEECH.WAKE_WORD_ENABLED;
|
||||
}
|
||||
|
||||
public isSpeechToTextEnabled(): boolean {
|
||||
return APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED;
|
||||
}
|
||||
|
||||
public getWakeWordDetector(): IWakeWordDetector {
|
||||
if (!this.isInitialized || !this.wakeWordDetector) {
|
||||
throw new Error("Wake word detector is not initialized");
|
||||
}
|
||||
return this.wakeWordDetector;
|
||||
}
|
||||
|
||||
public getSpeechToText(): ISpeechToText {
|
||||
if (!this.isInitialized || !this.speechToText) {
|
||||
throw new Error("Speech-to-text is not initialized");
|
||||
}
|
||||
return this.speechToText;
|
||||
}
|
||||
}
|
||||
|
||||
export const speechService = SpeechService.getInstance();
|
||||
247
src/speech/speechToText.ts
Normal file
247
src/speech/speechToText.ts
Normal file
@@ -0,0 +1,247 @@
|
||||
import { spawn } from 'child_process';
|
||||
import { EventEmitter } from 'events';
|
||||
import { watch } from 'fs';
|
||||
import path from 'path';
|
||||
import { ISpeechToText, SpeechToTextConfig } from "./types.js";
|
||||
|
||||
export interface TranscriptionOptions {
|
||||
model?: 'tiny.en' | 'base.en' | 'small.en' | 'medium.en' | 'large-v2';
|
||||
language?: string;
|
||||
temperature?: number;
|
||||
beamSize?: number;
|
||||
patience?: number;
|
||||
device?: 'cpu' | 'cuda';
|
||||
}
|
||||
|
||||
export interface TranscriptionResult {
|
||||
text: string;
|
||||
segments: Array<{
|
||||
text: string;
|
||||
start: number;
|
||||
end: number;
|
||||
confidence: number;
|
||||
}>;
|
||||
}
|
||||
|
||||
export interface WakeWordEvent {
|
||||
timestamp: string;
|
||||
audioFile: string;
|
||||
metadataFile: string;
|
||||
}
|
||||
|
||||
export class TranscriptionError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'TranscriptionError';
|
||||
}
|
||||
}
|
||||
|
||||
export class SpeechToText extends EventEmitter implements ISpeechToText {
|
||||
private containerName: string;
|
||||
private audioWatcher?: ReturnType<typeof watch>;
|
||||
private modelPath: string;
|
||||
private modelType: string;
|
||||
private isInitialized: boolean = false;
|
||||
|
||||
constructor(config: SpeechToTextConfig) {
|
||||
super();
|
||||
this.containerName = config.containerName || 'fast-whisper';
|
||||
this.modelPath = config.modelPath;
|
||||
this.modelType = config.modelType;
|
||||
}
|
||||
|
||||
public async initialize(): Promise<void> {
|
||||
if (this.isInitialized) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
// Initialization logic will be implemented here
|
||||
await this.setupContainer();
|
||||
this.isInitialized = true;
|
||||
this.emit('ready');
|
||||
} catch (error) {
|
||||
this.emit('error', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public async shutdown(): Promise<void> {
|
||||
if (!this.isInitialized) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
// Cleanup logic will be implemented here
|
||||
await this.cleanupContainer();
|
||||
this.isInitialized = false;
|
||||
this.emit('shutdown');
|
||||
} catch (error) {
|
||||
this.emit('error', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
public async transcribe(audioData: Buffer): Promise<string> {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error("Speech-to-text service is not initialized");
|
||||
}
|
||||
try {
|
||||
// Transcription logic will be implemented here
|
||||
this.emit('transcribing');
|
||||
const result = await this.processAudio(audioData);
|
||||
this.emit('transcribed', result);
|
||||
return result;
|
||||
} catch (error) {
|
||||
this.emit('error', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async setupContainer(): Promise<void> {
|
||||
// Container setup logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
|
||||
private async cleanupContainer(): Promise<void> {
|
||||
// Container cleanup logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
|
||||
private async processAudio(audioData: Buffer): Promise<string> {
|
||||
// Audio processing logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
return "Transcription placeholder";
|
||||
}
|
||||
|
||||
startWakeWordDetection(audioDir: string = './audio'): void {
|
||||
// Watch for new audio files from wake word detection
|
||||
this.audioWatcher = watch(audioDir, (eventType, filename) => {
|
||||
if (eventType === 'rename' && filename && filename.startsWith('wake_word_') && filename.endsWith('.wav')) {
|
||||
const audioFile = path.join(audioDir, filename);
|
||||
const metadataFile = `${audioFile}.json`;
|
||||
const parts = filename.split('_');
|
||||
const timestamp = parts[parts.length - 1].split('.')[0];
|
||||
|
||||
// Emit wake word event
|
||||
this.emit('wake_word', {
|
||||
timestamp,
|
||||
audioFile,
|
||||
metadataFile
|
||||
} as WakeWordEvent);
|
||||
|
||||
// Automatically transcribe the wake word audio
|
||||
this.transcribeAudio(audioFile)
|
||||
.then(result => {
|
||||
this.emit('transcription', { audioFile, result });
|
||||
})
|
||||
.catch(error => {
|
||||
this.emit('error', error);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
stopWakeWordDetection(): void {
|
||||
if (this.audioWatcher) {
|
||||
this.audioWatcher.close();
|
||||
this.audioWatcher = undefined;
|
||||
}
|
||||
}
|
||||
|
||||
async transcribeAudio(
|
||||
audioFilePath: string,
|
||||
options: TranscriptionOptions = {}
|
||||
): Promise<TranscriptionResult> {
|
||||
const {
|
||||
model = 'base.en',
|
||||
language = 'en',
|
||||
temperature = 0,
|
||||
beamSize = 5,
|
||||
patience = 1,
|
||||
device = 'cpu'
|
||||
} = options;
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
const args = [
|
||||
'exec',
|
||||
this.containerName,
|
||||
'fast-whisper',
|
||||
'--model', model,
|
||||
'--language', language,
|
||||
'--temperature', temperature.toString(),
|
||||
'--beam-size', beamSize.toString(),
|
||||
'--patience', patience.toString(),
|
||||
'--device', device,
|
||||
'--output-json',
|
||||
audioFilePath
|
||||
];
|
||||
|
||||
let process;
|
||||
try {
|
||||
process = spawn('docker', args);
|
||||
} catch (error) {
|
||||
this.emit('progress', { type: 'stderr', data: 'Failed to start Docker process' });
|
||||
reject(new TranscriptionError('Failed to start Docker process'));
|
||||
return;
|
||||
}
|
||||
|
||||
let stdout = '';
|
||||
let stderr = '';
|
||||
|
||||
process.stdout?.on('data', (data: Buffer) => {
|
||||
stdout += data.toString();
|
||||
this.emit('progress', { type: 'stdout', data: data.toString() });
|
||||
});
|
||||
|
||||
process.stderr?.on('data', (data: Buffer) => {
|
||||
stderr += data.toString();
|
||||
this.emit('progress', { type: 'stderr', data: data.toString() });
|
||||
});
|
||||
|
||||
process.on('error', (error: Error) => {
|
||||
this.emit('progress', { type: 'stderr', data: error.message });
|
||||
reject(new TranscriptionError(`Failed to execute Docker command: ${error.message}`));
|
||||
});
|
||||
|
||||
process.on('close', (code: number) => {
|
||||
if (code !== 0) {
|
||||
reject(new TranscriptionError(`Transcription failed: ${stderr}`));
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = JSON.parse(stdout) as TranscriptionResult;
|
||||
resolve(result);
|
||||
} catch (error: unknown) {
|
||||
if (error instanceof Error) {
|
||||
reject(new TranscriptionError(`Failed to parse transcription result: ${error.message}`));
|
||||
} else {
|
||||
reject(new TranscriptionError('Failed to parse transcription result: Unknown error'));
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async checkHealth(): Promise<boolean> {
|
||||
try {
|
||||
const process = spawn('docker', ['ps', '--filter', `name=${this.containerName}`, '--format', '{{.Status}}']);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
let output = '';
|
||||
process.stdout?.on('data', (data: Buffer) => {
|
||||
output += data.toString();
|
||||
});
|
||||
|
||||
process.on('error', () => {
|
||||
resolve(false);
|
||||
});
|
||||
|
||||
process.on('close', (code: number) => {
|
||||
resolve(code === 0 && output.toLowerCase().includes('up'));
|
||||
});
|
||||
});
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
20
src/speech/types.ts
Normal file
20
src/speech/types.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
import { EventEmitter } from "events";
|
||||
|
||||
export interface IWakeWordDetector {
|
||||
initialize(): Promise<void>;
|
||||
shutdown(): Promise<void>;
|
||||
startListening(): Promise<void>;
|
||||
stopListening(): Promise<void>;
|
||||
}
|
||||
|
||||
export interface ISpeechToText extends EventEmitter {
|
||||
initialize(): Promise<void>;
|
||||
shutdown(): Promise<void>;
|
||||
transcribe(audioData: Buffer): Promise<string>;
|
||||
}
|
||||
|
||||
export interface SpeechToTextConfig {
|
||||
modelPath: string;
|
||||
modelType: string;
|
||||
containerName?: string;
|
||||
}
|
||||
64
src/speech/wakeWordDetector.ts
Normal file
64
src/speech/wakeWordDetector.ts
Normal file
@@ -0,0 +1,64 @@
|
||||
import { IWakeWordDetector } from "./types.js";
|
||||
|
||||
export class WakeWordDetector implements IWakeWordDetector {
|
||||
private isListening: boolean = false;
|
||||
private isInitialized: boolean = false;
|
||||
|
||||
public async initialize(): Promise<void> {
|
||||
if (this.isInitialized) {
|
||||
return;
|
||||
}
|
||||
// Initialization logic will be implemented here
|
||||
await this.setupDetector();
|
||||
this.isInitialized = true;
|
||||
}
|
||||
|
||||
public async shutdown(): Promise<void> {
|
||||
if (this.isListening) {
|
||||
await this.stopListening();
|
||||
}
|
||||
if (this.isInitialized) {
|
||||
await this.cleanupDetector();
|
||||
this.isInitialized = false;
|
||||
}
|
||||
}
|
||||
|
||||
public async startListening(): Promise<void> {
|
||||
if (!this.isInitialized) {
|
||||
throw new Error("Wake word detector is not initialized");
|
||||
}
|
||||
if (this.isListening) {
|
||||
return;
|
||||
}
|
||||
await this.startDetection();
|
||||
this.isListening = true;
|
||||
}
|
||||
|
||||
public async stopListening(): Promise<void> {
|
||||
if (!this.isListening) {
|
||||
return;
|
||||
}
|
||||
await this.stopDetection();
|
||||
this.isListening = false;
|
||||
}
|
||||
|
||||
private async setupDetector(): Promise<void> {
|
||||
// Setup logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
|
||||
private async cleanupDetector(): Promise<void> {
|
||||
// Cleanup logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
|
||||
private async startDetection(): Promise<void> {
|
||||
// Start detection logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
|
||||
private async stopDetection(): Promise<void> {
|
||||
// Stop detection logic will be implemented here
|
||||
await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
|
||||
}
|
||||
}
|
||||
230
src/sse/__tests__/sse.security.test.ts
Normal file
230
src/sse/__tests__/sse.security.test.ts
Normal file
@@ -0,0 +1,230 @@
|
||||
import { SSEManager } from "../index";
|
||||
import { TokenManager } from "../../security/index";
|
||||
import type { SSEClient } from "../types";
|
||||
import {
|
||||
describe,
|
||||
it,
|
||||
expect,
|
||||
beforeEach,
|
||||
afterEach,
|
||||
mock,
|
||||
Mock,
|
||||
} from "bun:test";
|
||||
|
||||
describe("SSE Security Features", () => {
|
||||
const TEST_IP = "127.0.0.1";
|
||||
const validToken = "valid_token";
|
||||
let sseManager: SSEManager;
|
||||
let validateTokenMock: Mock<
|
||||
(token: string, ip: string) => { valid: boolean; error?: string }
|
||||
>;
|
||||
|
||||
beforeEach(() => {
|
||||
sseManager = new SSEManager({
|
||||
maxClients: 2,
|
||||
rateLimit: {
|
||||
MAX_MESSAGES: 2,
|
||||
WINDOW_MS: 1000,
|
||||
BURST_LIMIT: 1,
|
||||
},
|
||||
});
|
||||
|
||||
validateTokenMock = mock((token: string) => ({
|
||||
valid: token === validToken,
|
||||
error: token !== validToken ? "Invalid token" : undefined,
|
||||
}));
|
||||
TokenManager.validateToken = validateTokenMock;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
validateTokenMock.mockReset();
|
||||
});
|
||||
|
||||
function createTestClient(
|
||||
id: string,
|
||||
): Omit<SSEClient, "authenticated" | "subscriptions" | "rateLimit"> {
|
||||
return {
|
||||
id,
|
||||
ip: TEST_IP,
|
||||
connectedAt: new Date(),
|
||||
connectionTime: Date.now(),
|
||||
send: mock((data: string) => {}),
|
||||
};
|
||||
}
|
||||
|
||||
describe("Client Authentication", () => {
|
||||
it("should authenticate valid clients", () => {
|
||||
const client = createTestClient("test-client-1");
|
||||
const result = sseManager.addClient(client, validToken);
|
||||
|
||||
expect(result).toBeTruthy();
|
||||
expect(validateTokenMock).toHaveBeenCalledWith(validToken, TEST_IP);
|
||||
expect(result?.authenticated).toBe(true);
|
||||
});
|
||||
|
||||
it("should reject invalid tokens", () => {
|
||||
const client = createTestClient("test-client-2");
|
||||
const result = sseManager.addClient(client, "invalid_token");
|
||||
|
||||
expect(result).toBeNull();
|
||||
expect(validateTokenMock).toHaveBeenCalledWith("invalid_token", TEST_IP);
|
||||
});
|
||||
|
||||
it("should enforce maximum client limit", () => {
|
||||
// Add max number of clients
|
||||
const client1 = createTestClient("test-client-0");
|
||||
const client2 = createTestClient("test-client-1");
|
||||
const client3 = createTestClient("test-client-2");
|
||||
|
||||
expect(sseManager.addClient(client1, validToken)).toBeTruthy();
|
||||
expect(sseManager.addClient(client2, validToken)).toBeTruthy();
|
||||
expect(sseManager.addClient(client3, validToken)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
describe("Client Management", () => {
|
||||
it("should track client connections", () => {
|
||||
const client = createTestClient("test-client");
|
||||
sseManager.addClient(client, validToken);
|
||||
|
||||
const stats = sseManager.getStatistics();
|
||||
expect(stats.totalClients).toBe(1);
|
||||
expect(stats.authenticatedClients).toBe(1);
|
||||
});
|
||||
|
||||
it("should remove disconnected clients", () => {
|
||||
const client = createTestClient("test-client");
|
||||
sseManager.addClient(client, validToken);
|
||||
sseManager.removeClient("test-client");
|
||||
|
||||
const stats = sseManager.getStatistics();
|
||||
expect(stats.totalClients).toBe(0);
|
||||
});
|
||||
|
||||
it("should cleanup inactive clients", async () => {
|
||||
const client = createTestClient("test-client");
|
||||
sseManager.addClient(client, validToken);
|
||||
|
||||
// Wait for cleanup interval
|
||||
await new Promise((resolve) => setTimeout(resolve, 250));
|
||||
|
||||
const stats = sseManager.getStatistics();
|
||||
expect(stats.totalClients).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Rate Limiting", () => {
|
||||
it("should enforce rate limits for message sending", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
// Send messages up to the limit
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "first" },
|
||||
});
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "second" },
|
||||
});
|
||||
|
||||
// Next message should be rate limited
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "overflow" },
|
||||
});
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(2);
|
||||
});
|
||||
|
||||
it("should reset rate limits after window expires", async () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
// Send messages up to the limit
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "first" },
|
||||
});
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "second" },
|
||||
});
|
||||
|
||||
// Wait for rate limit window to expire
|
||||
await new Promise((resolve) => setTimeout(resolve, 1100));
|
||||
|
||||
// Should be able to send messages again
|
||||
sseManager["sendToClient"](sseClient!, {
|
||||
type: "test",
|
||||
data: { value: "new message" },
|
||||
});
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe("Event Broadcasting", () => {
|
||||
it("should only send events to authenticated clients", () => {
|
||||
const client1 = createTestClient("client1");
|
||||
const client2 = createTestClient("client2");
|
||||
|
||||
const sseClient1 = sseManager.addClient(client1, validToken);
|
||||
const sseClient2 = sseManager.addClient(client2, "invalid_token");
|
||||
|
||||
expect(sseClient1).toBeTruthy();
|
||||
expect(sseClient2).toBeNull();
|
||||
|
||||
sseClient1!.subscriptions.add("event:test_event");
|
||||
|
||||
const event = {
|
||||
event_type: "test_event",
|
||||
data: { value: "test" },
|
||||
origin: "test",
|
||||
time_fired: new Date().toISOString(),
|
||||
context: { id: "test" },
|
||||
};
|
||||
|
||||
sseManager.broadcastEvent(event);
|
||||
|
||||
const client1SendMock = client1.send as Mock<(data: string) => void>;
|
||||
const client2SendMock = client2.send as Mock<(data: string) => void>;
|
||||
|
||||
expect(client1SendMock.mock.calls.length).toBe(1);
|
||||
expect(client2SendMock.mock.calls.length).toBe(0);
|
||||
});
|
||||
|
||||
it("should respect subscription filters", () => {
|
||||
const client = createTestClient("test-client");
|
||||
const sseClient = sseManager.addClient(client, validToken);
|
||||
expect(sseClient).toBeTruthy();
|
||||
|
||||
sseClient!.subscriptions.add("event:test_event");
|
||||
|
||||
// Send matching event
|
||||
sseManager.broadcastEvent({
|
||||
event_type: "test_event",
|
||||
data: { value: "test" },
|
||||
origin: "test",
|
||||
time_fired: new Date().toISOString(),
|
||||
context: { id: "test" },
|
||||
});
|
||||
|
||||
// Send non-matching event
|
||||
sseManager.broadcastEvent({
|
||||
event_type: "other_event",
|
||||
data: { value: "test" },
|
||||
origin: "test",
|
||||
time_fired: new Date().toISOString(),
|
||||
context: { id: "test" },
|
||||
});
|
||||
|
||||
const sendMock = client.send as Mock<(data: string) => void>;
|
||||
expect(sendMock.mock.calls.length).toBe(1);
|
||||
});
|
||||
});
|
||||
});
|
||||
505
src/sse/index.ts
505
src/sse/index.ts
@@ -1,71 +1,118 @@
|
||||
import { EventEmitter } from 'events';
|
||||
import { HassEntity, HassEvent } from '../interfaces/hass.js';
|
||||
import { TokenManager } from '../security/index.js';
|
||||
import { EventEmitter } from "events";
|
||||
import { HassEntity, HassEvent } from "../interfaces/hass.js";
|
||||
import { TokenManager } from "../security/index.js";
|
||||
|
||||
// Constants
|
||||
const DEFAULT_MAX_CLIENTS = 1000;
|
||||
const DEFAULT_PING_INTERVAL = 30000; // 30 seconds
|
||||
const DEFAULT_CLEANUP_INTERVAL = 60000; // 1 minute
|
||||
const DEFAULT_MAX_CONNECTION_AGE = 24 * 60 * 60 * 1000; // 24 hours
|
||||
const DEFAULT_RATE_LIMIT = {
|
||||
MAX_MESSAGES: 100, // messages
|
||||
WINDOW_MS: 60000, // 1 minute
|
||||
BURST_LIMIT: 10, // max messages per second
|
||||
};
|
||||
|
||||
interface RateLimit {
|
||||
count: number;
|
||||
lastReset: number;
|
||||
burstCount: number;
|
||||
lastBurstReset: number;
|
||||
}
|
||||
|
||||
export interface SSEClient {
|
||||
id: string;
|
||||
send: (data: string) => void;
|
||||
subscriptions: {
|
||||
entities: Set<string>;
|
||||
events: Set<string>;
|
||||
domains: Set<string>;
|
||||
};
|
||||
ip: string;
|
||||
connectedAt: Date;
|
||||
lastPingAt?: Date;
|
||||
subscriptions: Set<string>;
|
||||
authenticated: boolean;
|
||||
send: (data: string) => void;
|
||||
rateLimit: RateLimit;
|
||||
lastPing: number;
|
||||
connectionTime: number;
|
||||
}
|
||||
|
||||
interface ClientStats {
|
||||
id: string;
|
||||
ip: string;
|
||||
connectedAt: Date;
|
||||
lastPingAt?: Date;
|
||||
subscriptionCount: number;
|
||||
connectionDuration: number;
|
||||
messagesSent: number;
|
||||
lastActivity: Date;
|
||||
}
|
||||
|
||||
export class SSEManager extends EventEmitter {
|
||||
private clients: Map<string, SSEClient> = new Map();
|
||||
private static instance: SSEManager | null = null;
|
||||
private entityStates: Map<string, HassEntity> = new Map();
|
||||
private readonly maxClients: number;
|
||||
private readonly pingInterval: number;
|
||||
private readonly cleanupInterval: number;
|
||||
private readonly maxConnectionAge: number;
|
||||
private readonly rateLimit: typeof DEFAULT_RATE_LIMIT;
|
||||
|
||||
// Configuration
|
||||
private readonly MAX_CLIENTS = 100;
|
||||
private readonly RATE_LIMIT_WINDOW = 60000; // 1 minute
|
||||
private readonly RATE_LIMIT_MAX_REQUESTS = 1000;
|
||||
private readonly CLIENT_TIMEOUT = 300000; // 5 minutes
|
||||
private readonly PING_INTERVAL = 30000; // 30 seconds
|
||||
|
||||
private constructor() {
|
||||
constructor(
|
||||
options: {
|
||||
maxClients?: number;
|
||||
pingInterval?: number;
|
||||
cleanupInterval?: number;
|
||||
maxConnectionAge?: number;
|
||||
rateLimit?: Partial<typeof DEFAULT_RATE_LIMIT>;
|
||||
} = {},
|
||||
) {
|
||||
super();
|
||||
console.log('Initializing SSE Manager...');
|
||||
this.startMaintenanceInterval();
|
||||
this.maxClients = options.maxClients || DEFAULT_MAX_CLIENTS;
|
||||
this.pingInterval = options.pingInterval || DEFAULT_PING_INTERVAL;
|
||||
this.cleanupInterval = options.cleanupInterval || DEFAULT_CLEANUP_INTERVAL;
|
||||
this.maxConnectionAge =
|
||||
options.maxConnectionAge || DEFAULT_MAX_CONNECTION_AGE;
|
||||
this.rateLimit = { ...DEFAULT_RATE_LIMIT, ...options.rateLimit };
|
||||
|
||||
console.log("Initializing SSE Manager...");
|
||||
this.startMaintenanceTasks();
|
||||
}
|
||||
|
||||
private startMaintenanceInterval() {
|
||||
private startMaintenanceTasks(): void {
|
||||
// Send periodic pings to keep connections alive
|
||||
setInterval(() => {
|
||||
this.performMaintenance();
|
||||
}, 60000); // Run every minute
|
||||
this.clients.forEach((client) => {
|
||||
if (!this.isRateLimited(client)) {
|
||||
try {
|
||||
client.send(
|
||||
JSON.stringify({
|
||||
type: "ping",
|
||||
timestamp: new Date().toISOString(),
|
||||
}),
|
||||
);
|
||||
client.lastPingAt = new Date();
|
||||
} catch (error) {
|
||||
console.error(`Failed to ping client ${client.id}:`, error);
|
||||
this.removeClient(client.id);
|
||||
}
|
||||
}
|
||||
});
|
||||
}, this.pingInterval);
|
||||
|
||||
private performMaintenance() {
|
||||
// Cleanup inactive or expired connections
|
||||
setInterval(() => {
|
||||
const now = Date.now();
|
||||
this.clients.forEach((client, clientId) => {
|
||||
const connectionAge = now - client.connectedAt.getTime();
|
||||
const lastPingAge = client.lastPingAt
|
||||
? now - client.lastPingAt.getTime()
|
||||
: 0;
|
||||
|
||||
// Check each client for timeouts and rate limits
|
||||
for (const [clientId, client] of this.clients.entries()) {
|
||||
// Remove inactive clients
|
||||
if (now - client.lastPing > this.CLIENT_TIMEOUT) {
|
||||
console.log(`Removing inactive client: ${clientId}`);
|
||||
if (
|
||||
connectionAge > this.maxConnectionAge ||
|
||||
lastPingAge > this.pingInterval * 2
|
||||
) {
|
||||
console.log(`Removing inactive client ${clientId}`);
|
||||
this.removeClient(clientId);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Reset rate limits if window has passed
|
||||
if (now - client.rateLimit.lastReset > this.RATE_LIMIT_WINDOW) {
|
||||
client.rateLimit.count = 0;
|
||||
client.rateLimit.lastReset = now;
|
||||
}
|
||||
}
|
||||
|
||||
// Log statistics
|
||||
console.log(`Maintenance complete - Active clients: ${this.clients.size}`);
|
||||
});
|
||||
}, this.cleanupInterval);
|
||||
}
|
||||
|
||||
static getInstance(): SSEManager {
|
||||
@@ -75,314 +122,264 @@ export class SSEManager extends EventEmitter {
|
||||
return SSEManager.instance;
|
||||
}
|
||||
|
||||
addClient(client: { id: string; send: (data: string) => void }, token?: string): SSEClient | null {
|
||||
// Check maximum client limit
|
||||
if (this.clients.size >= this.MAX_CLIENTS) {
|
||||
console.warn('Maximum client limit reached, rejecting new connection');
|
||||
addClient(
|
||||
client: Omit<SSEClient, "authenticated" | "subscriptions" | "rateLimit">,
|
||||
token: string,
|
||||
): SSEClient | null {
|
||||
// Validate token
|
||||
const validationResult = TokenManager.validateToken(token, client.ip);
|
||||
if (!validationResult.valid) {
|
||||
console.warn(
|
||||
`Invalid token for client ${client.id} from IP ${client.ip}: ${validationResult.error}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
const sseClient: SSEClient = {
|
||||
id: client.id,
|
||||
send: client.send,
|
||||
subscriptions: {
|
||||
entities: new Set<string>(),
|
||||
events: new Set<string>(),
|
||||
domains: new Set<string>()
|
||||
},
|
||||
authenticated: this.validateToken(token),
|
||||
// Check client limit
|
||||
if (this.clients.size >= this.maxClients) {
|
||||
console.warn(`Maximum client limit (${this.maxClients}) reached`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Create new client with authentication and subscriptions
|
||||
const newClient: SSEClient = {
|
||||
...client,
|
||||
authenticated: true,
|
||||
subscriptions: new Set(),
|
||||
lastPingAt: new Date(),
|
||||
rateLimit: {
|
||||
count: 0,
|
||||
lastReset: now
|
||||
lastReset: Date.now(),
|
||||
burstCount: 0,
|
||||
lastBurstReset: Date.now(),
|
||||
},
|
||||
lastPing: now,
|
||||
connectionTime: now
|
||||
};
|
||||
|
||||
this.clients.set(client.id, sseClient);
|
||||
console.log(`SSE client connected: ${client.id} (authenticated: ${sseClient.authenticated})`);
|
||||
this.clients.set(client.id, newClient);
|
||||
console.log(`New client ${client.id} connected from IP ${client.ip}`);
|
||||
|
||||
// Start ping interval for this client
|
||||
this.startClientPing(client.id);
|
||||
|
||||
// Send initial connection success message
|
||||
this.sendToClient(sseClient, {
|
||||
type: 'connection',
|
||||
status: 'connected',
|
||||
id: client.id,
|
||||
authenticated: sseClient.authenticated,
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
|
||||
return sseClient;
|
||||
return newClient;
|
||||
}
|
||||
|
||||
private startClientPing(clientId: string) {
|
||||
const interval = setInterval(() => {
|
||||
const client = this.clients.get(clientId);
|
||||
if (!client) {
|
||||
clearInterval(interval);
|
||||
return;
|
||||
private isRateLimited(client: SSEClient): boolean {
|
||||
const now = Date.now();
|
||||
|
||||
// Reset window counters if needed
|
||||
if (now - client.rateLimit.lastReset >= this.rateLimit.WINDOW_MS) {
|
||||
client.rateLimit.count = 0;
|
||||
client.rateLimit.lastReset = now;
|
||||
}
|
||||
|
||||
this.sendToClient(client, {
|
||||
type: 'ping',
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
}, this.PING_INTERVAL);
|
||||
// Reset burst counters if needed (every second)
|
||||
if (now - client.rateLimit.lastBurstReset >= 1000) {
|
||||
client.rateLimit.burstCount = 0;
|
||||
client.rateLimit.lastBurstReset = now;
|
||||
}
|
||||
|
||||
removeClient(clientId: string) {
|
||||
// Check both window and burst limits
|
||||
return (
|
||||
client.rateLimit.count >= this.rateLimit.MAX_MESSAGES ||
|
||||
client.rateLimit.burstCount >= this.rateLimit.BURST_LIMIT
|
||||
);
|
||||
}
|
||||
|
||||
private updateRateLimit(client: SSEClient): void {
|
||||
const now = Date.now();
|
||||
client.rateLimit.count++;
|
||||
client.rateLimit.burstCount++;
|
||||
|
||||
// Update timestamps if needed
|
||||
if (now - client.rateLimit.lastReset >= this.rateLimit.WINDOW_MS) {
|
||||
client.rateLimit.lastReset = now;
|
||||
client.rateLimit.count = 1;
|
||||
}
|
||||
|
||||
if (now - client.rateLimit.lastBurstReset >= 1000) {
|
||||
client.rateLimit.lastBurstReset = now;
|
||||
client.rateLimit.burstCount = 1;
|
||||
}
|
||||
}
|
||||
|
||||
removeClient(clientId: string): void {
|
||||
if (this.clients.has(clientId)) {
|
||||
this.clients.delete(clientId);
|
||||
console.log(`SSE client disconnected: ${clientId}`);
|
||||
this.emit("client_disconnected", {
|
||||
clientId,
|
||||
timestamp: new Date().toISOString(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
subscribeToEntity(clientId: string, entityId: string) {
|
||||
subscribeToEntity(clientId: string, entityId: string): void {
|
||||
const client = this.clients.get(clientId);
|
||||
if (client?.authenticated) {
|
||||
client.subscriptions.entities.add(entityId);
|
||||
if (!client?.authenticated) {
|
||||
console.warn(
|
||||
`Unauthenticated client ${clientId} attempted to subscribe to entity: ${entityId}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
client.subscriptions.add(`entity:${entityId}`);
|
||||
console.log(`Client ${clientId} subscribed to entity: ${entityId}`);
|
||||
|
||||
// Send current state if available
|
||||
const currentState = this.entityStates.get(entityId);
|
||||
if (currentState) {
|
||||
if (currentState && !this.isRateLimited(client)) {
|
||||
this.sendToClient(client, {
|
||||
type: 'state_changed',
|
||||
type: "state_changed",
|
||||
data: {
|
||||
entity_id: currentState.entity_id,
|
||||
state: currentState.state,
|
||||
attributes: currentState.attributes,
|
||||
last_changed: currentState.last_changed,
|
||||
last_updated: currentState.last_updated
|
||||
}
|
||||
last_updated: currentState.last_updated,
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
subscribeToDomain(clientId: string, domain: string): void {
|
||||
const client = this.clients.get(clientId);
|
||||
if (!client?.authenticated) {
|
||||
console.warn(
|
||||
`Unauthenticated client ${clientId} attempted to subscribe to domain: ${domain}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
subscribeToDomain(clientId: string, domain: string) {
|
||||
const client = this.clients.get(clientId);
|
||||
if (client?.authenticated) {
|
||||
client.subscriptions.domains.add(domain);
|
||||
client.subscriptions.add(`domain:${domain}`);
|
||||
console.log(`Client ${clientId} subscribed to domain: ${domain}`);
|
||||
}
|
||||
|
||||
subscribeToEvent(clientId: string, eventType: string): void {
|
||||
const client = this.clients.get(clientId);
|
||||
if (!client?.authenticated) {
|
||||
console.warn(
|
||||
`Unauthenticated client ${clientId} attempted to subscribe to event: ${eventType}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
subscribeToEvent(clientId: string, eventType: string) {
|
||||
const client = this.clients.get(clientId);
|
||||
if (client?.authenticated) {
|
||||
client.subscriptions.events.add(eventType);
|
||||
client.subscriptions.add(`event:${eventType}`);
|
||||
console.log(`Client ${clientId} subscribed to event: ${eventType}`);
|
||||
}
|
||||
}
|
||||
|
||||
broadcastStateChange(entity: HassEntity) {
|
||||
broadcastStateChange(entity: HassEntity): void {
|
||||
// Update stored state
|
||||
this.entityStates.set(entity.entity_id, entity);
|
||||
|
||||
const domain = entity.entity_id.split('.')[0];
|
||||
const domain = entity.entity_id.split(".")[0];
|
||||
const message = {
|
||||
type: 'state_changed',
|
||||
type: "state_changed",
|
||||
data: {
|
||||
entity_id: entity.entity_id,
|
||||
state: entity.state,
|
||||
attributes: entity.attributes,
|
||||
last_changed: entity.last_changed,
|
||||
last_updated: entity.last_updated
|
||||
last_updated: entity.last_updated,
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
console.log(`Broadcasting state change for ${entity.entity_id}`);
|
||||
|
||||
// Send to relevant subscribers only
|
||||
for (const client of this.clients.values()) {
|
||||
if (!client.authenticated) continue;
|
||||
this.clients.forEach((client) => {
|
||||
if (!client.authenticated || this.isRateLimited(client)) return;
|
||||
|
||||
if (
|
||||
client.subscriptions.entities.has(entity.entity_id) ||
|
||||
client.subscriptions.domains.has(domain) ||
|
||||
client.subscriptions.events.has('state_changed')
|
||||
client.subscriptions.has(`entity:${entity.entity_id}`) ||
|
||||
client.subscriptions.has(`domain:${domain}`) ||
|
||||
client.subscriptions.has("event:state_changed")
|
||||
) {
|
||||
this.sendToClient(client, message);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
broadcastEvent(event: HassEvent) {
|
||||
broadcastEvent(event: HassEvent): void {
|
||||
const message = {
|
||||
type: event.event_type,
|
||||
data: event.data,
|
||||
origin: event.origin,
|
||||
time_fired: event.time_fired,
|
||||
context: event.context,
|
||||
timestamp: new Date().toISOString()
|
||||
timestamp: new Date().toISOString(),
|
||||
};
|
||||
|
||||
console.log(`Broadcasting event: ${event.event_type}`);
|
||||
|
||||
// Send to relevant subscribers only
|
||||
for (const client of this.clients.values()) {
|
||||
if (!client.authenticated) continue;
|
||||
this.clients.forEach((client) => {
|
||||
if (!client.authenticated || this.isRateLimited(client)) return;
|
||||
|
||||
if (client.subscriptions.events.has(event.event_type)) {
|
||||
if (client.subscriptions.has(`event:${event.event_type}`)) {
|
||||
this.sendToClient(client, message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private sendToClient(client: SSEClient, data: any) {
|
||||
try {
|
||||
// Check rate limit
|
||||
const now = Date.now();
|
||||
if (now - client.rateLimit.lastReset > this.RATE_LIMIT_WINDOW) {
|
||||
client.rateLimit.count = 0;
|
||||
client.rateLimit.lastReset = now;
|
||||
}
|
||||
|
||||
if (client.rateLimit.count >= this.RATE_LIMIT_MAX_REQUESTS) {
|
||||
console.warn(`Rate limit exceeded for client ${client.id}`);
|
||||
this.sendToClient(client, {
|
||||
type: 'error',
|
||||
error: 'rate_limit_exceeded',
|
||||
message: 'Too many requests, please try again later',
|
||||
timestamp: new Date().toISOString()
|
||||
});
|
||||
}
|
||||
|
||||
private sendToClient(client: SSEClient, data: unknown): void {
|
||||
try {
|
||||
if (!client.authenticated) {
|
||||
console.warn(
|
||||
`Attempted to send message to unauthenticated client ${client.id}`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
client.rateLimit.count++;
|
||||
client.lastPing = now;
|
||||
client.send(JSON.stringify(data));
|
||||
if (this.isRateLimited(client)) {
|
||||
console.warn(`Rate limit exceeded for client ${client.id}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const message = typeof data === "string" ? data : JSON.stringify(data);
|
||||
client.send(message);
|
||||
this.updateRateLimit(client);
|
||||
} catch (error) {
|
||||
console.error(`Error sending message to client ${client.id}:`, error);
|
||||
console.error(`Failed to send message to client ${client.id}:`, error);
|
||||
this.removeClient(client.id);
|
||||
}
|
||||
}
|
||||
|
||||
private validateToken(token?: string): boolean {
|
||||
if (!token) return false;
|
||||
return TokenManager.validateToken(token);
|
||||
}
|
||||
|
||||
// Utility methods
|
||||
getConnectedClients(): number {
|
||||
return this.clients.size;
|
||||
}
|
||||
|
||||
getClientSubscriptions(clientId: string) {
|
||||
return this.clients.get(clientId)?.subscriptions;
|
||||
}
|
||||
|
||||
getEntityState(entityId: string): HassEntity | undefined {
|
||||
return this.entityStates.get(entityId);
|
||||
}
|
||||
|
||||
// Add new event types
|
||||
broadcastServiceCall(domain: string, service: string, data: any) {
|
||||
const message = {
|
||||
type: 'service_called',
|
||||
data: {
|
||||
domain,
|
||||
service,
|
||||
service_data: data
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
this.broadcastToSubscribers('service_called', message);
|
||||
}
|
||||
|
||||
broadcastAutomationTriggered(automationId: string, trigger: any) {
|
||||
const message = {
|
||||
type: 'automation_triggered',
|
||||
data: {
|
||||
automation_id: automationId,
|
||||
trigger
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
this.broadcastToSubscribers('automation_triggered', message);
|
||||
}
|
||||
|
||||
broadcastScriptExecuted(scriptId: string, data: any) {
|
||||
const message = {
|
||||
type: 'script_executed',
|
||||
data: {
|
||||
script_id: scriptId,
|
||||
execution_data: data
|
||||
},
|
||||
timestamp: new Date().toISOString()
|
||||
};
|
||||
|
||||
this.broadcastToSubscribers('script_executed', message);
|
||||
}
|
||||
|
||||
private broadcastToSubscribers(eventType: string, message: any) {
|
||||
for (const client of this.clients.values()) {
|
||||
if (!client.authenticated) continue;
|
||||
|
||||
if (client.subscriptions.events.has(eventType)) {
|
||||
this.sendToClient(client, message);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add statistics methods
|
||||
getStatistics() {
|
||||
getStatistics(): {
|
||||
totalClients: number;
|
||||
authenticatedClients: number;
|
||||
clientStats: ClientStats[];
|
||||
subscriptionStats: { [key: string]: number };
|
||||
} {
|
||||
const now = Date.now();
|
||||
const stats = {
|
||||
total_clients: this.clients.size,
|
||||
authenticated_clients: 0,
|
||||
total_subscriptions: 0,
|
||||
clients_by_connection_time: {
|
||||
less_than_1m: 0,
|
||||
less_than_5m: 0,
|
||||
less_than_1h: 0,
|
||||
more_than_1h: 0
|
||||
},
|
||||
total_entities_tracked: this.entityStates.size,
|
||||
subscriptions: {
|
||||
entities: new Set<string>(),
|
||||
events: new Set<string>(),
|
||||
domains: new Set<string>()
|
||||
}
|
||||
};
|
||||
const clientStats: ClientStats[] = [];
|
||||
const subscriptionStats: { [key: string]: number } = {};
|
||||
let authenticatedClients = 0;
|
||||
|
||||
for (const client of this.clients.values()) {
|
||||
if (client.authenticated) stats.authenticated_clients++;
|
||||
|
||||
// Count subscriptions
|
||||
stats.total_subscriptions +=
|
||||
client.subscriptions.entities.size +
|
||||
client.subscriptions.events.size +
|
||||
client.subscriptions.domains.size;
|
||||
|
||||
// Add to subscription sets
|
||||
client.subscriptions.entities.forEach(entity => stats.subscriptions.entities.add(entity));
|
||||
client.subscriptions.events.forEach(event => stats.subscriptions.events.add(event));
|
||||
client.subscriptions.domains.forEach(domain => stats.subscriptions.domains.add(domain));
|
||||
|
||||
// Calculate connection duration
|
||||
const connectionDuration = now - client.connectionTime;
|
||||
if (connectionDuration < 60000) stats.clients_by_connection_time.less_than_1m++;
|
||||
else if (connectionDuration < 300000) stats.clients_by_connection_time.less_than_5m++;
|
||||
else if (connectionDuration < 3600000) stats.clients_by_connection_time.less_than_1h++;
|
||||
else stats.clients_by_connection_time.more_than_1h++;
|
||||
this.clients.forEach((client) => {
|
||||
if (client.authenticated) {
|
||||
authenticatedClients++;
|
||||
}
|
||||
|
||||
// Convert Sets to Arrays for JSON serialization
|
||||
clientStats.push({
|
||||
id: client.id,
|
||||
ip: client.ip,
|
||||
connectedAt: client.connectedAt,
|
||||
lastPingAt: client.lastPingAt,
|
||||
subscriptionCount: client.subscriptions.size,
|
||||
connectionDuration: now - client.connectedAt.getTime(),
|
||||
messagesSent: client.rateLimit.count,
|
||||
lastActivity: new Date(client.rateLimit.lastReset),
|
||||
});
|
||||
|
||||
client.subscriptions.forEach((sub) => {
|
||||
subscriptionStats[sub] = (subscriptionStats[sub] || 0) + 1;
|
||||
});
|
||||
});
|
||||
|
||||
return {
|
||||
...stats,
|
||||
subscriptions: {
|
||||
entities: Array.from(stats.subscriptions.entities),
|
||||
events: Array.from(stats.subscriptions.events),
|
||||
domains: Array.from(stats.subscriptions.domains)
|
||||
}
|
||||
totalClients: this.clients.size,
|
||||
authenticatedClients,
|
||||
clientStats,
|
||||
subscriptionStats,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
67
src/sse/types.ts
Normal file
67
src/sse/types.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import type { Mock } from "bun:test";
|
||||
|
||||
export interface SSEClient {
|
||||
id: string;
|
||||
ip: string;
|
||||
connectedAt: Date;
|
||||
send: Mock<(data: string) => void>;
|
||||
rateLimit: {
|
||||
count: number;
|
||||
lastReset: number;
|
||||
};
|
||||
connectionTime: number;
|
||||
}
|
||||
|
||||
export interface HassEventData {
|
||||
[key: string]: unknown;
|
||||
}
|
||||
|
||||
export interface SSEEvent {
|
||||
event_type: string;
|
||||
data: HassEventData;
|
||||
origin: string;
|
||||
time_fired: string;
|
||||
context: {
|
||||
id: string;
|
||||
[key: string]: unknown;
|
||||
};
|
||||
}
|
||||
|
||||
export interface SSEMessage {
|
||||
type: string;
|
||||
data?: unknown;
|
||||
error?: string;
|
||||
}
|
||||
|
||||
export interface SSEManagerConfig {
|
||||
maxClients?: number;
|
||||
pingInterval?: number;
|
||||
cleanupInterval?: number;
|
||||
maxConnectionAge?: number;
|
||||
rateLimitWindow?: number;
|
||||
maxRequestsPerWindow?: number;
|
||||
}
|
||||
|
||||
export type MockSendFn = (data: string) => void;
|
||||
export type MockSend = Mock<MockSendFn>;
|
||||
|
||||
export type ValidateTokenFn = (
|
||||
token: string,
|
||||
ip?: string,
|
||||
) => { valid: boolean; error?: string };
|
||||
export type MockValidateToken = Mock<ValidateTokenFn>;
|
||||
|
||||
// Type guard for mock functions
|
||||
export function isMockFunction(value: unknown): value is Mock<unknown> {
|
||||
return typeof value === "function" && "mock" in value;
|
||||
}
|
||||
|
||||
// Safe type assertion for mock objects
|
||||
export function asMockFunction<T extends (...args: any[]) => any>(
|
||||
value: unknown,
|
||||
): Mock<T> {
|
||||
if (!isMockFunction(value)) {
|
||||
throw new Error("Value is not a mock function");
|
||||
}
|
||||
return value as Mock<T>;
|
||||
}
|
||||
@@ -1,31 +1,54 @@
|
||||
import { z } from 'zod';
|
||||
import { Tool, AddonParams, HassAddonResponse, HassAddonInfoResponse } from '../types/index.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { z } from "zod";
|
||||
import {
|
||||
Tool,
|
||||
AddonParams,
|
||||
HassAddonResponse,
|
||||
HassAddonInfoResponse,
|
||||
} from "../types/index.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
export const addonTool: Tool = {
|
||||
name: 'addon',
|
||||
description: 'Manage Home Assistant add-ons',
|
||||
name: "addon",
|
||||
description: "Manage Home Assistant add-ons",
|
||||
parameters: z.object({
|
||||
action: z.enum(['list', 'info', 'install', 'uninstall', 'start', 'stop', 'restart'])
|
||||
.describe('Action to perform with add-on'),
|
||||
slug: z.string().optional().describe('Add-on slug (required for all actions except list)'),
|
||||
version: z.string().optional().describe('Version to install (only for install action)'),
|
||||
action: z
|
||||
.enum([
|
||||
"list",
|
||||
"info",
|
||||
"install",
|
||||
"uninstall",
|
||||
"start",
|
||||
"stop",
|
||||
"restart",
|
||||
])
|
||||
.describe("Action to perform with add-on"),
|
||||
slug: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Add-on slug (required for all actions except list)"),
|
||||
version: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Version to install (only for install action)"),
|
||||
}),
|
||||
execute: async (params: AddonParams) => {
|
||||
try {
|
||||
if (params.action === 'list') {
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/hassio/store`, {
|
||||
if (params.action === "list") {
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/hassio/store`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch add-ons: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const data = await response.json() as HassAddonResponse;
|
||||
const data = (await response.json()) as HassAddonResponse;
|
||||
return {
|
||||
success: true,
|
||||
addons: data.data.addons.map((addon) => ({
|
||||
@@ -40,39 +63,39 @@ export const addonTool: Tool = {
|
||||
};
|
||||
} else {
|
||||
if (!params.slug) {
|
||||
throw new Error('Add-on slug is required for this action');
|
||||
throw new Error("Add-on slug is required for this action");
|
||||
}
|
||||
|
||||
let endpoint = '';
|
||||
let method = 'GET';
|
||||
let endpoint = "";
|
||||
let method = "GET";
|
||||
const body: Record<string, any> = {};
|
||||
|
||||
switch (params.action) {
|
||||
case 'info':
|
||||
case "info":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/info`;
|
||||
break;
|
||||
case 'install':
|
||||
case "install":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/install`;
|
||||
method = 'POST';
|
||||
method = "POST";
|
||||
if (params.version) {
|
||||
body.version = params.version;
|
||||
}
|
||||
break;
|
||||
case 'uninstall':
|
||||
case "uninstall":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/uninstall`;
|
||||
method = 'POST';
|
||||
method = "POST";
|
||||
break;
|
||||
case 'start':
|
||||
case "start":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/start`;
|
||||
method = 'POST';
|
||||
method = "POST";
|
||||
break;
|
||||
case 'stop':
|
||||
case "stop":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/stop`;
|
||||
method = 'POST';
|
||||
method = "POST";
|
||||
break;
|
||||
case 'restart':
|
||||
case "restart":
|
||||
endpoint = `/api/hassio/addons/${params.slug}/restart`;
|
||||
method = 'POST';
|
||||
method = "POST";
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -80,16 +103,18 @@ export const addonTool: Tool = {
|
||||
method,
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
...(Object.keys(body).length > 0 && { body: JSON.stringify(body) }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to ${params.action} add-on: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to ${params.action} add-on: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const data = await response.json() as HassAddonInfoResponse;
|
||||
const data = (await response.json()) as HassAddonInfoResponse;
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully ${params.action}ed add-on ${params.slug}`,
|
||||
@@ -99,7 +124,8 @@ export const addonTool: Tool = {
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,95 +1,136 @@
|
||||
import { z } from 'zod';
|
||||
import { Tool, AutomationConfigParams, AutomationConfig, AutomationResponse } from '../types/index.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { z } from "zod";
|
||||
import {
|
||||
Tool,
|
||||
AutomationConfigParams,
|
||||
AutomationConfig,
|
||||
AutomationResponse,
|
||||
} from "../types/index.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
export const automationConfigTool: Tool = {
|
||||
name: 'automation_config',
|
||||
description: 'Advanced automation configuration and management',
|
||||
name: "automation_config",
|
||||
description: "Advanced automation configuration and management",
|
||||
parameters: z.object({
|
||||
action: z.enum(['create', 'update', 'delete', 'duplicate'])
|
||||
.describe('Action to perform with automation config'),
|
||||
automation_id: z.string().optional()
|
||||
.describe('Automation ID (required for update, delete, and duplicate)'),
|
||||
config: z.object({
|
||||
alias: z.string().describe('Friendly name for the automation'),
|
||||
description: z.string().optional().describe('Description of what the automation does'),
|
||||
mode: z.enum(['single', 'parallel', 'queued', 'restart']).optional()
|
||||
.describe('How multiple triggerings are handled'),
|
||||
trigger: z.array(z.any()).describe('List of triggers'),
|
||||
condition: z.array(z.any()).optional().describe('List of conditions'),
|
||||
action: z.array(z.any()).describe('List of actions'),
|
||||
}).optional().describe('Automation configuration (required for create and update)'),
|
||||
action: z
|
||||
.enum(["create", "update", "delete", "duplicate"])
|
||||
.describe("Action to perform with automation config"),
|
||||
automation_id: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Automation ID (required for update, delete, and duplicate)"),
|
||||
config: z
|
||||
.object({
|
||||
alias: z.string().describe("Friendly name for the automation"),
|
||||
description: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Description of what the automation does"),
|
||||
mode: z
|
||||
.enum(["single", "parallel", "queued", "restart"])
|
||||
.optional()
|
||||
.describe("How multiple triggerings are handled"),
|
||||
trigger: z.array(z.any()).describe("List of triggers"),
|
||||
condition: z.array(z.any()).optional().describe("List of conditions"),
|
||||
action: z.array(z.any()).describe("List of actions"),
|
||||
})
|
||||
.optional()
|
||||
.describe("Automation configuration (required for create and update)"),
|
||||
}),
|
||||
execute: async (params: AutomationConfigParams) => {
|
||||
try {
|
||||
switch (params.action) {
|
||||
case 'create': {
|
||||
case "create": {
|
||||
if (!params.config) {
|
||||
throw new Error('Configuration is required for creating automation');
|
||||
throw new Error(
|
||||
"Configuration is required for creating automation",
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/config/automation/config`, {
|
||||
method: 'POST',
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/config/automation/config`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(params.config),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to create automation: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to create automation: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const responseData = await response.json() as { automation_id: string };
|
||||
const responseData = (await response.json()) as {
|
||||
automation_id: string;
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
message: 'Successfully created automation',
|
||||
message: "Successfully created automation",
|
||||
automation_id: responseData.automation_id,
|
||||
};
|
||||
}
|
||||
|
||||
case 'update': {
|
||||
case "update": {
|
||||
if (!params.automation_id || !params.config) {
|
||||
throw new Error('Automation ID and configuration are required for updating automation');
|
||||
throw new Error(
|
||||
"Automation ID and configuration are required for updating automation",
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`, {
|
||||
method: 'PUT',
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`,
|
||||
{
|
||||
method: "PUT",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(params.config),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to update automation: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to update automation: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const responseData = await response.json() as { automation_id: string };
|
||||
const responseData = (await response.json()) as {
|
||||
automation_id: string;
|
||||
};
|
||||
return {
|
||||
success: true,
|
||||
automation_id: responseData.automation_id,
|
||||
message: 'Automation updated successfully'
|
||||
message: "Automation updated successfully",
|
||||
};
|
||||
}
|
||||
|
||||
case 'delete': {
|
||||
case "delete": {
|
||||
if (!params.automation_id) {
|
||||
throw new Error('Automation ID is required for deleting automation');
|
||||
throw new Error(
|
||||
"Automation ID is required for deleting automation",
|
||||
);
|
||||
}
|
||||
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`, {
|
||||
method: 'DELETE',
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`,
|
||||
{
|
||||
method: "DELETE",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to delete automation: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to delete automation: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -98,41 +139,54 @@ export const automationConfigTool: Tool = {
|
||||
};
|
||||
}
|
||||
|
||||
case 'duplicate': {
|
||||
case "duplicate": {
|
||||
if (!params.automation_id) {
|
||||
throw new Error('Automation ID is required for duplicating automation');
|
||||
throw new Error(
|
||||
"Automation ID is required for duplicating automation",
|
||||
);
|
||||
}
|
||||
|
||||
// First, get the existing automation config
|
||||
const getResponse = await fetch(`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`, {
|
||||
const getResponse = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/config/automation/config/${params.automation_id}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!getResponse.ok) {
|
||||
throw new Error(`Failed to get automation config: ${getResponse.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to get automation config: ${getResponse.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const config = await getResponse.json() as AutomationConfig;
|
||||
const config = (await getResponse.json()) as AutomationConfig;
|
||||
config.alias = `${config.alias} (Copy)`;
|
||||
|
||||
// Create new automation with modified config
|
||||
const createResponse = await fetch(`${APP_CONFIG.HASS_HOST}/api/config/automation/config`, {
|
||||
method: 'POST',
|
||||
const createResponse = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/config/automation/config`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(config),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!createResponse.ok) {
|
||||
throw new Error(`Failed to create duplicate automation: ${createResponse.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to create duplicate automation: ${createResponse.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const newAutomation = await createResponse.json() as AutomationResponse;
|
||||
const newAutomation =
|
||||
(await createResponse.json()) as AutomationResponse;
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully duplicated automation ${params.automation_id}`,
|
||||
@@ -143,7 +197,8 @@ export const automationConfigTool: Tool = {
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,62 +1,85 @@
|
||||
import { z } from 'zod';
|
||||
import { Tool, AutomationParams, HassState, AutomationResponse } from '../types/index.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { z } from "zod";
|
||||
import {
|
||||
Tool,
|
||||
AutomationParams,
|
||||
HassState,
|
||||
AutomationResponse,
|
||||
} from "../types/index.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
export const automationTool: Tool = {
|
||||
name: 'automation',
|
||||
description: 'Manage Home Assistant automations',
|
||||
name: "automation",
|
||||
description: "Manage Home Assistant automations",
|
||||
parameters: z.object({
|
||||
action: z.enum(['list', 'toggle', 'trigger']).describe('Action to perform with automation'),
|
||||
automation_id: z.string().optional().describe('Automation ID (required for toggle and trigger actions)'),
|
||||
action: z
|
||||
.enum(["list", "toggle", "trigger"])
|
||||
.describe("Action to perform with automation"),
|
||||
automation_id: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Automation ID (required for toggle and trigger actions)"),
|
||||
}),
|
||||
execute: async (params: AutomationParams) => {
|
||||
try {
|
||||
if (params.action === 'list') {
|
||||
if (params.action === "list") {
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/states`, {
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch automations: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to fetch automations: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const states = (await response.json()) as HassState[];
|
||||
const automations = states.filter((state) => state.entity_id.startsWith('automation.'));
|
||||
const automations = states.filter((state) =>
|
||||
state.entity_id.startsWith("automation."),
|
||||
);
|
||||
|
||||
return {
|
||||
success: true,
|
||||
automations: automations.map((automation) => ({
|
||||
entity_id: automation.entity_id,
|
||||
name: automation.attributes.friendly_name || automation.entity_id.split('.')[1],
|
||||
name:
|
||||
automation.attributes.friendly_name ||
|
||||
automation.entity_id.split(".")[1],
|
||||
state: automation.state,
|
||||
last_triggered: automation.attributes.last_triggered,
|
||||
})),
|
||||
};
|
||||
} else {
|
||||
if (!params.automation_id) {
|
||||
throw new Error('Automation ID is required for toggle and trigger actions');
|
||||
throw new Error(
|
||||
"Automation ID is required for toggle and trigger actions",
|
||||
);
|
||||
}
|
||||
|
||||
const service = params.action === 'toggle' ? 'toggle' : 'trigger';
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/services/automation/${service}`, {
|
||||
method: 'POST',
|
||||
const service = params.action === "toggle" ? "toggle" : "trigger";
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/services/automation/${service}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
entity_id: params.automation_id,
|
||||
}),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to ${service} automation: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to ${service} automation: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
const responseData = await response.json() as AutomationResponse;
|
||||
const responseData = (await response.json()) as AutomationResponse;
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully ${service}d automation ${params.automation_id}`,
|
||||
@@ -66,7 +89,8 @@ export const automationTool: Tool = {
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,51 +1,94 @@
|
||||
import { z } from 'zod';
|
||||
import { Tool, CommandParams } from '../types/index.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { DomainSchema } from '../schemas.js';
|
||||
import { z } from "zod";
|
||||
import { Tool, CommandParams } from "../types/index.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
import { DomainSchema } from "../schemas.js";
|
||||
|
||||
// Define command constants
|
||||
const commonCommands = ['turn_on', 'turn_off', 'toggle'] as const;
|
||||
const coverCommands = [...commonCommands, 'open', 'close', 'stop', 'set_position', 'set_tilt_position'] as const;
|
||||
const climateCommands = [...commonCommands, 'set_temperature', 'set_hvac_mode', 'set_fan_mode', 'set_humidity'] as const;
|
||||
const commonCommands = ["turn_on", "turn_off", "toggle"] as const;
|
||||
const coverCommands = [
|
||||
...commonCommands,
|
||||
"open",
|
||||
"close",
|
||||
"stop",
|
||||
"set_position",
|
||||
"set_tilt_position",
|
||||
] as const;
|
||||
const climateCommands = [
|
||||
...commonCommands,
|
||||
"set_temperature",
|
||||
"set_hvac_mode",
|
||||
"set_fan_mode",
|
||||
"set_humidity",
|
||||
] as const;
|
||||
|
||||
export const controlTool: Tool = {
|
||||
name: 'control',
|
||||
description: 'Control Home Assistant devices and services',
|
||||
name: "control",
|
||||
description: "Control Home Assistant devices and services",
|
||||
parameters: z.object({
|
||||
command: z.enum([...commonCommands, ...coverCommands, ...climateCommands])
|
||||
.describe('The command to execute'),
|
||||
entity_id: z.string().describe('The entity ID to control'),
|
||||
command: z
|
||||
.enum([...commonCommands, ...coverCommands, ...climateCommands])
|
||||
.describe("The command to execute"),
|
||||
entity_id: z.string().describe("The entity ID to control"),
|
||||
// Common parameters
|
||||
state: z.string().optional().describe('The desired state for the entity'),
|
||||
state: z.string().optional().describe("The desired state for the entity"),
|
||||
// Light parameters
|
||||
brightness: z.number().min(0).max(255).optional()
|
||||
.describe('Brightness level for lights (0-255)'),
|
||||
color_temp: z.number().optional()
|
||||
.describe('Color temperature for lights'),
|
||||
rgb_color: z.tuple([z.number(), z.number(), z.number()]).optional()
|
||||
.describe('RGB color values'),
|
||||
brightness: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(255)
|
||||
.optional()
|
||||
.describe("Brightness level for lights (0-255)"),
|
||||
color_temp: z.number().optional().describe("Color temperature for lights"),
|
||||
rgb_color: z
|
||||
.tuple([z.number(), z.number(), z.number()])
|
||||
.optional()
|
||||
.describe("RGB color values"),
|
||||
// Cover parameters
|
||||
position: z.number().min(0).max(100).optional()
|
||||
.describe('Position for covers (0-100)'),
|
||||
tilt_position: z.number().min(0).max(100).optional()
|
||||
.describe('Tilt position for covers (0-100)'),
|
||||
position: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(100)
|
||||
.optional()
|
||||
.describe("Position for covers (0-100)"),
|
||||
tilt_position: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(100)
|
||||
.optional()
|
||||
.describe("Tilt position for covers (0-100)"),
|
||||
// Climate parameters
|
||||
temperature: z.number().optional()
|
||||
.describe('Target temperature for climate devices'),
|
||||
target_temp_high: z.number().optional()
|
||||
.describe('Target high temperature for climate devices'),
|
||||
target_temp_low: z.number().optional()
|
||||
.describe('Target low temperature for climate devices'),
|
||||
hvac_mode: z.enum(['off', 'heat', 'cool', 'heat_cool', 'auto', 'dry', 'fan_only']).optional()
|
||||
.describe('HVAC mode for climate devices'),
|
||||
fan_mode: z.enum(['auto', 'low', 'medium', 'high']).optional()
|
||||
.describe('Fan mode for climate devices'),
|
||||
humidity: z.number().min(0).max(100).optional()
|
||||
.describe('Target humidity for climate devices')
|
||||
temperature: z
|
||||
.number()
|
||||
.optional()
|
||||
.describe("Target temperature for climate devices"),
|
||||
target_temp_high: z
|
||||
.number()
|
||||
.optional()
|
||||
.describe("Target high temperature for climate devices"),
|
||||
target_temp_low: z
|
||||
.number()
|
||||
.optional()
|
||||
.describe("Target low temperature for climate devices"),
|
||||
hvac_mode: z
|
||||
.enum(["off", "heat", "cool", "heat_cool", "auto", "dry", "fan_only"])
|
||||
.optional()
|
||||
.describe("HVAC mode for climate devices"),
|
||||
fan_mode: z
|
||||
.enum(["auto", "low", "medium", "high"])
|
||||
.optional()
|
||||
.describe("Fan mode for climate devices"),
|
||||
humidity: z
|
||||
.number()
|
||||
.min(0)
|
||||
.max(100)
|
||||
.optional()
|
||||
.describe("Target humidity for climate devices"),
|
||||
}),
|
||||
execute: async (params: CommandParams) => {
|
||||
try {
|
||||
const domain = params.entity_id.split('.')[0] as keyof typeof DomainSchema.Values;
|
||||
const domain = params.entity_id.split(
|
||||
".",
|
||||
)[0] as keyof typeof DomainSchema.Values;
|
||||
|
||||
if (!Object.values(DomainSchema.Values).includes(domain)) {
|
||||
throw new Error(`Unsupported domain: ${domain}`);
|
||||
@@ -53,12 +96,12 @@ export const controlTool: Tool = {
|
||||
|
||||
const service = params.command;
|
||||
const serviceData: Record<string, any> = {
|
||||
entity_id: params.entity_id
|
||||
entity_id: params.entity_id,
|
||||
};
|
||||
|
||||
// Handle domain-specific parameters
|
||||
switch (domain) {
|
||||
case 'light':
|
||||
case "light":
|
||||
if (params.brightness !== undefined) {
|
||||
serviceData.brightness = params.brightness;
|
||||
}
|
||||
@@ -70,17 +113,20 @@ export const controlTool: Tool = {
|
||||
}
|
||||
break;
|
||||
|
||||
case 'cover':
|
||||
if (service === 'set_position' && params.position !== undefined) {
|
||||
case "cover":
|
||||
if (service === "set_position" && params.position !== undefined) {
|
||||
serviceData.position = params.position;
|
||||
}
|
||||
if (service === 'set_tilt_position' && params.tilt_position !== undefined) {
|
||||
if (
|
||||
service === "set_tilt_position" &&
|
||||
params.tilt_position !== undefined
|
||||
) {
|
||||
serviceData.tilt_position = params.tilt_position;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'climate':
|
||||
if (service === 'set_temperature') {
|
||||
case "climate":
|
||||
if (service === "set_temperature") {
|
||||
if (params.temperature !== undefined) {
|
||||
serviceData.temperature = params.temperature;
|
||||
}
|
||||
@@ -91,19 +137,19 @@ export const controlTool: Tool = {
|
||||
serviceData.target_temp_low = params.target_temp_low;
|
||||
}
|
||||
}
|
||||
if (service === 'set_hvac_mode' && params.hvac_mode !== undefined) {
|
||||
if (service === "set_hvac_mode" && params.hvac_mode !== undefined) {
|
||||
serviceData.hvac_mode = params.hvac_mode;
|
||||
}
|
||||
if (service === 'set_fan_mode' && params.fan_mode !== undefined) {
|
||||
if (service === "set_fan_mode" && params.fan_mode !== undefined) {
|
||||
serviceData.fan_mode = params.fan_mode;
|
||||
}
|
||||
if (service === 'set_humidity' && params.humidity !== undefined) {
|
||||
if (service === "set_humidity" && params.humidity !== undefined) {
|
||||
serviceData.humidity = params.humidity;
|
||||
}
|
||||
break;
|
||||
|
||||
case 'switch':
|
||||
case 'contact':
|
||||
case "switch":
|
||||
case "contact":
|
||||
// These domains only support basic operations (turn_on, turn_off, toggle)
|
||||
break;
|
||||
|
||||
@@ -112,28 +158,34 @@ export const controlTool: Tool = {
|
||||
}
|
||||
|
||||
// Call Home Assistant service
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/services/${domain}/${service}`, {
|
||||
method: 'POST',
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/services/${domain}/${service}`,
|
||||
{
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(serviceData),
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to execute ${service} for ${params.entity_id}: ${response.statusText}`);
|
||||
throw new Error(
|
||||
`Failed to execute ${service} for ${params.entity_id}: ${response.statusText}`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
success: true,
|
||||
message: `Successfully executed ${service} for ${params.entity_id}`
|
||||
message: `Successfully executed ${service} for ${params.entity_id}`,
|
||||
};
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred'
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
@@ -1,21 +1,35 @@
|
||||
import { z } from 'zod';
|
||||
import { Tool, HistoryParams } from '../types/index.js';
|
||||
import { APP_CONFIG } from '../config/app.config.js';
|
||||
import { z } from "zod";
|
||||
import { Tool, HistoryParams } from "../types/index.js";
|
||||
import { APP_CONFIG } from "../config/app.config.js";
|
||||
|
||||
export const historyTool: Tool = {
|
||||
name: 'get_history',
|
||||
description: 'Get state history for Home Assistant entities',
|
||||
name: "get_history",
|
||||
description: "Get state history for Home Assistant entities",
|
||||
parameters: z.object({
|
||||
entity_id: z.string().describe('The entity ID to get history for'),
|
||||
start_time: z.string().optional().describe('Start time in ISO format. Defaults to 24 hours ago'),
|
||||
end_time: z.string().optional().describe('End time in ISO format. Defaults to now'),
|
||||
minimal_response: z.boolean().optional().describe('Return minimal response to reduce data size'),
|
||||
significant_changes_only: z.boolean().optional().describe('Only return significant state changes'),
|
||||
entity_id: z.string().describe("The entity ID to get history for"),
|
||||
start_time: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("Start time in ISO format. Defaults to 24 hours ago"),
|
||||
end_time: z
|
||||
.string()
|
||||
.optional()
|
||||
.describe("End time in ISO format. Defaults to now"),
|
||||
minimal_response: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("Return minimal response to reduce data size"),
|
||||
significant_changes_only: z
|
||||
.boolean()
|
||||
.optional()
|
||||
.describe("Only return significant state changes"),
|
||||
}),
|
||||
execute: async (params: HistoryParams) => {
|
||||
try {
|
||||
const now = new Date();
|
||||
const startTime = params.start_time ? new Date(params.start_time) : new Date(now.getTime() - 24 * 60 * 60 * 1000);
|
||||
const startTime = params.start_time
|
||||
? new Date(params.start_time)
|
||||
: new Date(now.getTime() - 24 * 60 * 60 * 1000);
|
||||
const endTime = params.end_time ? new Date(params.end_time) : now;
|
||||
|
||||
// Build query parameters
|
||||
@@ -27,12 +41,15 @@ export const historyTool: Tool = {
|
||||
end_time: endTime.toISOString(),
|
||||
});
|
||||
|
||||
const response = await fetch(`${APP_CONFIG.HASS_HOST}/api/history/period/${startTime.toISOString()}?${queryParams.toString()}`, {
|
||||
const response = await fetch(
|
||||
`${APP_CONFIG.HASS_HOST}/api/history/period/${startTime.toISOString()}?${queryParams.toString()}`,
|
||||
{
|
||||
headers: {
|
||||
Authorization: `Bearer ${APP_CONFIG.HASS_TOKEN}`,
|
||||
'Content-Type': 'application/json',
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Failed to fetch history: ${response.statusText}`);
|
||||
@@ -46,7 +63,8 @@ export const historyTool: Tool = {
|
||||
} catch (error) {
|
||||
return {
|
||||
success: false,
|
||||
message: error instanceof Error ? error.message : 'Unknown error occurred',
|
||||
message:
|
||||
error instanceof Error ? error.message : "Unknown error occurred",
|
||||
};
|
||||
}
|
||||
},
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user