Compare commits

18 Commits

| SHA1 |
|---|
| b9727981cc |
| e1db799b1d |
| 905339fb67 |
| 849b080aba |
| f8bbe4af6f |
| 3a6f79c9a8 |
| 60f18f8e71 |
| 47f11b3d95 |
| f24be8ff53 |
| dfff432321 |
| d59bf02d08 |
| 345a5888d9 |
| d6a5771e01 |
| 5f4ddfbd88 |
| c11b40da9e |
| 3a54766b61 |
| 8b1948ce30 |
| 38ee5368d1 |
@@ -73,7 +73,6 @@ temp/
 .cloud/
 *.db
 *.db-*
-bun.lockb
 .cursor/
 .cursor*
 .cursorconfig
@@ -102,3 +102,10 @@ TEST_HASS_HOST=http://localhost:8123
 TEST_HASS_TOKEN=test_token
 TEST_HASS_SOCKET_URL=ws://localhost:8123/api/websocket
 TEST_PORT=3001
+
+# Speech Features Configuration
+ENABLE_SPEECH_FEATURES=false
+ENABLE_WAKE_WORD=true
+ENABLE_SPEECH_TO_TEXT=true
+WHISPER_MODEL_PATH=/models
+WHISPER_MODEL_TYPE=base
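The hunk above adds speech-related settings to the example environment file. As a rough TypeScript sketch of how a server might read these variables, only the variable names and example values come from the diff; the `SpeechConfig` shape and `loadSpeechConfig` helper are hypothetical and not part of this PR:

```typescript
// Hypothetical sketch: parsing the speech-related environment variables added above.
// Only the variable names and defaults mirror the diff; this helper is illustrative.
interface SpeechConfig {
  enabled: boolean;
  wakeWord: boolean;
  speechToText: boolean;
  whisperModelPath: string;
  whisperModelType: string;
}

function loadSpeechConfig(env: Record<string, string | undefined> = process.env): SpeechConfig {
  // Environment values arrive as strings, so booleans are compared against "true" explicitly.
  const flag = (value: string | undefined, fallback: boolean) =>
    value === undefined ? fallback : value.toLowerCase() === "true";

  return {
    enabled: flag(env.ENABLE_SPEECH_FEATURES, false),
    wakeWord: flag(env.ENABLE_WAKE_WORD, true),
    speechToText: flag(env.ENABLE_SPEECH_TO_TEXT, true),
    whisperModelPath: env.WHISPER_MODEL_PATH ?? "/models",
    whisperModelType: env.WHISPER_MODEL_TYPE ?? "base",
  };
}

// Example: only wire up wake-word handling when the feature is switched on.
const speech = loadSpeechConfig();
if (speech.enabled && speech.wakeWord) {
  console.log(`Wake word enabled, Whisper model "${speech.whisperModelType}" at ${speech.whisperModelPath}`);
}
```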
64  .github/workflows/deploy-docs.yml  (vendored, new file)
@@ -0,0 +1,64 @@
+name: Deploy Documentation to GitHub Pages
+
+on:
+  push:
+    branches:
+      - main
+    paths:
+      - 'docs/**'
+      - '.github/workflows/deploy-docs.yml'
+
+permissions:
+  contents: read
+  pages: write
+  id-token: write
+
+# Allow only one concurrent deployment
+concurrency:
+  group: "pages"
+  cancel-in-progress: true
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Setup Ruby
+        uses: ruby/setup-ruby@v1
+        with:
+          ruby-version: '3.2'
+          bundler-cache: true
+          cache-version: 0
+
+      - name: Setup Pages
+        uses: actions/configure-pages@v4
+
+      - name: Install dependencies
+        run: |
+          cd docs
+          bundle install
+
+      - name: Build site
+        run: |
+          cd docs
+          bundle exec jekyll build
+        env:
+          JEKYLL_ENV: production
+
+      - name: Upload artifact
+        uses: actions/upload-pages-artifact@v3
+        with:
+          path: docs/_site
+
+  deploy:
+    needs: build
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    runs-on: ubuntu-latest
+    steps:
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v4
69  Dockerfile
@@ -1,23 +1,70 @@
-# Use Bun as the base image
-FROM oven/bun:1.0.25
+# Use Node.js as base for building
+FROM node:20-slim as builder
 
 # Set working directory
 WORKDIR /app
 
-# Copy package files
+# Install bun
+RUN npm install -g bun@1.0.25
+
+# Install only the minimal dependencies needed and clean up in the same layer
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    ca-certificates \
+    curl \
+    && rm -rf /var/lib/apt/lists/* \
+    && apt-get clean \
+    && rm -rf /var/cache/apt/*
+
+# Set build-time environment variables
+ENV NODE_ENV=production \
+    NODE_OPTIONS="--max-old-space-size=2048" \
+    BUN_INSTALL_CACHE=0
+
+# Copy only package files first
 COPY package.json ./
 
-# Install dependencies
-RUN bun install
+# Install dependencies with a clean slate
+RUN rm -rf node_modules .bun bun.lockb && \
+    bun install --no-save
 
-# Copy source code
-COPY . .
+# Copy source files and build
+COPY src ./src
+COPY tsconfig*.json ./
+RUN bun build ./src/index.ts --target=bun --minify --outdir=./dist
 
-# Build TypeScript
-RUN bun run build
+# Create a smaller production image
+FROM node:20-slim as runner
+
+# Install bun in production image
+RUN npm install -g bun@1.0.25
+
+# Set production environment variables
+ENV NODE_ENV=production \
+    NODE_OPTIONS="--max-old-space-size=1024"
+
+# Create a non-root user
+RUN addgroup --system --gid 1001 nodejs && \
+    adduser --system --uid 1001 bunjs
+
+WORKDIR /app
+
+# Copy only the necessary files from builder
+COPY --from=builder --chown=bunjs:nodejs /app/dist ./dist
+COPY --from=builder --chown=bunjs:nodejs /app/node_modules ./node_modules
+COPY --chown=bunjs:nodejs package.json ./
+
+# Create logs directory with proper permissions
+RUN mkdir -p /app/logs && chown -R bunjs:nodejs /app/logs
+
+# Switch to non-root user
+USER bunjs
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD curl -f http://localhost:4000/health || exit 1
 
 # Expose port
 EXPOSE 4000
 
-# Start the application
-CMD ["bun", "run", "start"]
+# Start the application with optimized flags
+CMD ["bun", "--smol", "run", "start"]
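The new `HEALTHCHECK` instruction polls `http://localhost:4000/health` with curl. The route itself lives in the application source, which this compare does not show, so the following is only a hedged sketch of a minimal Bun handler that would satisfy the probe; the route shape and response body are assumptions, not code from this PR:

```typescript
// Minimal sketch of a /health endpoint compatible with the Dockerfile's HEALTHCHECK.
// The real route in src/ may return richer status information.
const server = Bun.serve({
  port: Number(process.env.PORT ?? 4000),
  fetch(request) {
    const { pathname } = new URL(request.url);
    if (pathname === "/health") {
      // Any 2xx response makes `curl -f` exit 0, keeping the container "healthy".
      return Response.json({ status: "ok", uptime: process.uptime() });
    }
    return new Response("Not found", { status: 404 });
  },
});

console.log(`Listening on http://localhost:${server.port}`);
```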
380  README.md
@@ -1,230 +1,288 @@
-# Model Context Protocol (MCP) Server for Home Assistant
+# 🚀 MCP Server for Home Assistant - Bringing AI-Powered Smart Homes to Life!
 
-The Model Context Protocol (MCP) Server is a robust, secure, and high-performance bridge that integrates Home Assistant with Language Learning Models (LLMs), enabling natural language control and real-time monitoring of your smart home devices. Unlock advanced automation, control, and analytics for your Home Assistant ecosystem.
+[](LICENSE)
+[](https://bun.sh)
+[](https://www.typescriptlang.org)
+[](#)
+[](https://jango-blockchained.github.io/homeassistant-mcp/)
+[](https://www.docker.com)
 
+---
 
-## Table of Contents
+## Overview 🌐
 
-- [Overview](#overview)
-- [Key Features](#key-features)
-- [Architecture & Design](#architecture--design)
-- [Installation](#installation)
-- [Basic Setup](#basic-setup)
-- [Docker Setup (Recommended)](#docker-setup-recommended)
-- [Usage](#usage)
-- [API & Documentation](#api--documentation)
-- [Development](#development)
-- [Roadmap & Future Plans](#roadmap--future-plans)
-- [Community & Support](#community--support)
-- [Contributing](#contributing)
-- [Troubleshooting & FAQ](#troubleshooting--faq)
-- [License](#license)
+Welcome to the **Model Context Protocol (MCP) Server for Home Assistant**! This robust platform bridges Home Assistant with cutting-edge Language Learning Models (LLMs), enabling natural language interactions and real-time automation of your smart devices. Imagine entering your home, saying:
 
-## Overview
+> “Hey MCP, dim the lights and start my evening playlist,”
 
-The MCP Server bridges Home Assistant with advanced LLM integrations to deliver intuitive control, automation, and state monitoring. Leveraging a high-performance runtime and real-time communication protocols, MCP offers a seamless experience for managing your smart home.
+and watching your home transform instantly—that's the magic that MCP Server delivers!
 
-## Key Features
+---
 
-### Device Control & Monitoring
-- **Smart Device Control:** Manage lights, climate, covers, switches, sensors, media players, fans, locks, vacuums, and cameras using natural language commands.
-- **Real-time Updates:** Receive instant notifications and updates via Server-Sent Events (SSE).
+## Key Benefits ✨
 
-### System & Automation Management
-- **Automation Engine:** Create, modify, and trigger custom automation rules with ease.
-- **Add-on & Package Management:** Integrates with HACS for deploying custom integrations, themes, scripts, and applications.
-- **Robust System Management:** Features advanced state monitoring, error handling, and security safeguards.
+### 🎮 Device Control & Monitoring
+- **Voice-Controlled Automation:**
+  Use simple commands like "Turn on the kitchen lights" or "Set the thermostat to 22°C" without touching a switch.
+  **Real-World Example:**
+  In the morning, say "Good morning! Open the blinds and start the coffee machine" to kickstart your day automatically.
 
-## Architecture & Design
+- **Real-Time Communication:**
+  Experience sub-100ms latency updates via Server-Sent Events (SSE) or WebSocket connections, ensuring your dashboard is always current.
+  **Real-World Example:**
+  Monitor energy usage instantly during peak hours and adjust remotely for efficient consumption.
 
-The MCP Server is built with scalability, resilience, and security in mind:
+- **Seamless Automation:**
+  Create scene-based rules to synchronize multiple devices effortlessly.
+  **Real-World Example:**
+  For movie nights, have MCP dim the lights, adjust the sound system, and launch your favorite streaming app with just one command.
 
-- **High-Performance Runtime:** Powered by Bun for fast startup, efficient memory utilization, and native TypeScript support.
-- **Real-time Communication:** Employs Server-Sent Events (SSE) for continuous, real-time data updates.
-- **Modular & Extensible:** Designed to support plugins, add-ons, and custom automation scripts, allowing for easy expansion.
-- **Secure API Integration:** Implements token-based authentication, rate limiting, and adherence to best security practices.
+### 🤖 AI-Powered Enhancements
+- **Natural Language Processing (NLP):**
+  Convert everyday speech into actionable commands—just say, "Prepare the house for dinner," and MCP will adjust lighting, temperature, and even play soft background music.
 
-_For a deeper dive into the system architecture, please refer to our [Architecture Documentation](docs/ARCHITECTURE.md) (if available)._
+- **Predictive Automation & Suggestions:**
+  Receive proactive recommendations based on usage habits and environmental trends.
+  **Real-World Example:**
+  When home temperature fluctuates unexpectedly, MCP suggests an optimal setting and notifies you immediately.
 
-## Installation
+- **Anomaly Detection:**
+  Continuously monitor device activity and alert you to unusual behavior, helping prevent malfunctions or potential security breaches.
 
-### Basic Setup
+---
+
+## Architectural Overview 🏗
+
+Our architecture is engineered for performance, scalability, and security. The following Mermaid diagram illustrates the data flow and component interactions:
+
+```mermaid
+graph TD
+    subgraph Client
+        A[Client Application<br/>(Web / Mobile / Voice)]
+    end
+    subgraph CDN
+        B[CDN / Cache]
+    end
+    subgraph Server
+        C[Bun Native Server]
+        E[NLP Engine<br/>& Language Processing Module]
+    end
+    subgraph Integration
+        D[Home Assistant<br/>(Devices, Lights, Thermostats)]
+    end
+
+    A -->|HTTP Request| B
+    B -- Cache Miss --> C
+    C -->|Interpret Command| E
+    E -->|Determine Action| D
+    D -->|Return State/Action| C
+    C -->|Response| B
+    B -->|Cached/Processed Response| A
+```
+
+Learn more about our architecture in the [Architecture Documentation](docs/architecture.md).
+
+---
+
+## Technical Stack 🔧
+
+Our solution is built on a modern, high-performance stack that powers every feature:
+
+- **Bun:**
+  A next-generation JavaScript runtime offering rapid startup times, native TypeScript support, and high performance.
+  👉 [Learn about Bun](https://bun.sh)
+
+- **Bun Native Server:**
+  Utilizes Bun's built-in HTTP server to efficiently process API requests with sub-100ms response times.
+  👉 See the [Installation Guide](docs/getting-started/installation.md) for details.
+
+- **Natural Language Processing (NLP) & LLM Integration:**
+  Processes and interprets natural language commands using state-of-the-art LLMs and custom NLP modules.
+  👉 Find API usage details in the [API Documentation](docs/api.md).
+
+- **Home Assistant Integration:**
+  Provides seamless connectivity with Home Assistant, ensuring flawless communication with your smart devices.
+  👉 Refer to the [Usage Guide](docs/usage.md) for more information.
+
+- **Redis Cache:**
+  Enables rapid data retrieval and session persistence essential for real-time updates.
+
+- **TypeScript:**
+  Enhances type safety and developer productivity across the entire codebase.
+
+- **JWT & Security Middleware:**
+  Protects your ecosystem with JWT-based authentication, request sanitization, rate-limiting, and encryption.
+
+- **Containerization with Docker:**
+  Enables scalable, isolated deployments for production environments.
+
+For further technical details, check out our [Documentation Index](docs/index.md).
+
+---
+
+## Installation 🛠
+
+### 🐳 Docker Setup (Recommended)
+
+For a hassle-free, containerized deployment:
+
-1. **Install Bun:** If Bun is not installed:
 ```bash
+# 1. Clone the repository (using a shallow copy for efficiency)
+git clone --depth 1 https://github.com/jango-blockchained/homeassistant-mcp.git
+
+# 2. Configure your environment: copy the example file and edit it with your Home Assistant credentials
+cp .env.example .env # Modify .env with your Home Assistant host, tokens, etc.
+
+# 3. Build and run the Docker containers
+docker compose up -d --build
+
+# 4. View real-time logs (last 50 log entries)
+docker compose logs -f --tail=50
+```
 
+👉 Refer to our [Installation Guide](docs/getting-started/installation.md) for full details.
 
+### 💻 Bare Metal Installation
 
+For direct deployment on your host machine:
 
+```bash
+# 1. Install Bun (if not already installed)
 curl -fsSL https://bun.sh/install | bash
 
+# 2. Install project dependencies with caching support
+bun install --frozen-lockfile
 
+# 3. Launch the server in development mode with hot-reload enabled
+bun run dev --watch
 ```
 
-2. **Clone the Repository:**
-```bash
-git clone https://github.com/jango-blockchained/homeassistant-mcp.git
-cd homeassistant-mcp
-```
+---
 
-3. **Install Dependencies:**
-```bash
-bun install
-```
+## Real-World Usage Examples 🔍
 
-4. **Build the Project:**
-```bash
-bun run build
-```
+### 📱 Smart Home Dashboard Integration
+Integrate MCP's real-time updates into your custom dashboard for a dynamic smart home experience:
 
-### Docker Setup (Recommended)
 
-1. **Clone the Repository:**
-```bash
-git clone https://github.com/jango-blockchained/homeassistant-mcp.git
-cd homeassistant-mcp
-```
 
-2. **Configure Environment:**
-```bash
-cp .env.example .env
-```
-Customize the `.env` file with your Home Assistant configuration.
 
-3. **Deploy with Docker Compose:**
-```bash
-docker compose up -d
-```
-- View logs: `docker compose logs -f`
-- Stop the server: `docker compose down`
 
-4. **Update the Application:**
-```bash
-git pull && docker compose up -d --build
-```
 
-## Usage
 
-Once the server is running, open your browser at [http://localhost:3000](http://localhost:3000). For real-time device updates, integrate the SSE endpoint in your application:
 
 ```javascript
 const eventSource = new EventSource('http://localhost:3000/subscribe_events?token=YOUR_TOKEN&domain=light');
 
 eventSource.onmessage = (event) => {
   const data = JSON.parse(event.data);
-  console.log('Update received:', data);
+  console.log('Real-time update:', data);
+  // Update your UI dashboard, e.g., refresh a light intensity indicator.
 };
 ```
 
-## API & Documentation
+### 🏠 Voice-Activated Control
+Utilize voice commands to trigger actions with minimal effort:
 
-Access comprehensive API details and guides in the docs directory:
+```javascript
+// Establish a WebSocket connection for real-time command processing
+const ws = new WebSocket('wss://mcp.yourha.com/ws');
 
-- **API Reference:** [API Documentation](docs/API.md)
-- **SSE Documentation:** [SSE API](docs/SSE_API.md)
-- **Troubleshooting Guide:** [TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)
-- **Architecture Details:** [Architecture Documentation](docs/ARCHITECTURE.md) _(if available)_
+ws.onmessage = ({ data }) => {
+  const update = JSON.parse(data);
+  if (update.entity_id === 'light.living_room') {
+    console.log('Adjusting living room lighting based on voice command...');
+    // Additional logic to update your UI or trigger further actions can go here.
+  }
+};
 
-## Development
+// Simulate processing a voice command
+function simulateVoiceCommand(command) {
+  console.log("Processing voice command:", command);
+  // Integrate with your actual voice-to-text system as needed.
+}
 
-### Running in Development Mode
+simulateVoiceCommand("Turn off all the lights for bedtime");
 
-```bash
-bun run dev
 ```
 
-### Running Tests
+👉 Learn more in our [Usage Guide](docs/usage.md).
 
-- Execute all tests:
-```bash
-bun test
-```
+---
 
-- Run tests with coverage:
-```bash
-bun test --coverage
-```
+## Update Strategy 🔄
 
-### Production Build & Start
+Maintain a seamless operation with zero downtime updates:
 
 ```bash
-bun run build
-bun start
+# 1. Pull the latest Docker images
+docker compose pull
+
+# 2. Rebuild and restart containers smoothly
+docker compose up -d --build
+
+# 3. Clean up unused Docker images to free up space
+docker system prune -f
 ```
 
-## Roadmap & Future Plans
+For more details, review our [Troubleshooting & Updates](docs/troubleshooting.md).
 
-The MCP Server is under active development and improvement. Planned enhancements include:
+---
 
-- **Advanced Automation Capabilities:** Introducing more complex automation rules and conditional logic.
-- **Enhanced Security Features:** Additional authentication layers, encryption enhancements, and security monitoring tools.
-- **User Interface Improvements:** Development of a more intuitive web dashboard for easier device management.
-- **Expanded Integrations:** Support for a wider array of smart home devices and third-party services.
-- **Performance Optimizations:** Continued efforts to reduce latency and improve resource efficiency.
+## Security Features 🔐
 
-_For additional details, check out our [Roadmap](docs/ROADMAP.md) (if available)._
+We prioritize the security of your smart home with multiple layers of defense:
+- **JWT Authentication 🔑:** Secure, token-based API access to prevent unauthorized usage.
+- **Request Sanitization 🧼:** Automatic filtering and validation of API requests to combat injection attacks.
+- **Rate Limiting & Fail2Ban 🚫:** Monitors requests to prevent brute force and DDoS attacks.
+- **End-to-End Encryption 🔒:** Ensures that your commands and data remain private during transmission.
 
-## Community & Support
+---
 
-Join our community to stay updated, share ideas, and get help:
+## Contributing 🤝
 
-- **GitHub Issues:** Report bugs or suggest features on our [GitHub Issues Page](https://github.com/jango-blockchained/homeassistant-mcp/issues).
-- **Discussion Forums:** Connect with other users and contributors in our community forums.
-- **Chat Platforms:** Join our real-time discussions on [Discord](#) or [Slack](#).
+We value community contributions! Here's how you can help improve MCP Server:
+1. **Fork the Repository 🍴**
+   Create your own copy of the project.
+2. **Create a Feature Branch 🌿**
 
-## Contributing
 
-We welcome your contributions! To get started:
 
-1. Fork the repository.
-2. Create your feature branch:
 ```bash
 git checkout -b feature/your-feature-name
 ```
-3. Install dependencies:
+3. **Install Dependencies & Run Tests 🧪**
 ```bash
 bun install
+bun test --coverage
 ```
-4. Make your changes and run tests:
-```bash
-bun test
-```
-5. Commit and push your changes, then open a Pull Request.
+4. **Make Your Changes & Commit 📝**
+   Follow the [Conventional Commits](https://www.conventionalcommits.org) guidelines.
+5. **Open a Pull Request 🔀**
+   Submit your changes for review.
 
-_For detailed guidelines, see [CONTRIBUTING.md](CONTRIBUTING.md)._
+Read more in our [Contribution Guidelines](docs/contributing.md).
 
-## Troubleshooting & FAQ
+---
 
-### Common Issues
+## Roadmap & Future Enhancements 🔮
 
-- **Connection Problems:** Ensure that your `HASS_HOST`, authentication token, and WebSocket URL are correctly configured.
-- **Docker Deployment:** Confirm that Docker is running and that your `.env` file contains the correct settings.
-- **Automation Errors:** Verify entity availability and review your automation configurations for potential issues.
+We're continuously evolving MCP Server. Upcoming features include:
+- **AI Assistant Integration (Q4 2024):**
+  Smarter, context-aware voice commands and personalized automation.
+- **Predictive Automation (Q1 2025):**
+  Enhanced scheduling capabilities powered by advanced AI.
+- **Enhanced Security (Q2 2024):**
+  Introduction of multi-factor authentication, advanced monitoring, and rigorous encryption methods.
+- **Performance Optimizations (Q3 2024):**
+  Reducing latency further, optimizing caching, and improving load balancing.
 
-_For more troubleshooting details, refer to [TROUBLESHOOTING.md](docs/TROUBLESHOOTING.md)._
+For more details, see our [Roadmap](docs/roadmap.md).
 
-### Frequently Asked Questions
+---
 
-**Q: What platforms does MCP Server support?**
+## Community & Support 🌍
 
-A: MCP Server runs on Linux, macOS, and Windows (Docker is recommended for Windows environments).
+Your feedback and collaboration are vital! Join our community:
+- **GitHub Issues:** Report bugs or request features via our [Issues Page](https://github.com/jango-blockchained/homeassistant-mcp/issues).
+- **Discord & Slack:** Connect with fellow users and developers in real-time.
+- **Documentation:** Find comprehensive guides on the [MCP Documentation Website](https://jango-blockchained.github.io/homeassistant-mcp/).
 
-**Q: How do I report a bug or request a feature?**
+---
 
-A: Please use our [GitHub Issues Page](https://github.com/jango-blockchained/homeassistant-mcp/issues) to report bugs or request new features.
+## License 📜
 
-**Q: Can I contribute to the project?**
+This project is licensed under the MIT License. See [LICENSE](LICENSE) for full details.
 
-A: Absolutely! We welcome contributions from the community. See the [Contributing](#contributing) section for more details.
+---
 
-## License
+🔋 Batteries included.
 
-This project is licensed under the MIT License. See [LICENSE](LICENSE) for the full license text.
 
-## Documentation
 
-Full documentation is available at: https://yourusername.github.io/your-repo-name/
 
-## Quick Start
 
-## Installation
 
-## Usage
570  bun.lock  (new executable file)
@@ -0,0 +1,570 @@
+{
+  "lockfileVersion": 0,
+  "workspaces": {
+    "": {
+      "dependencies": {
+        "@elysiajs/cors": "^1.2.0",
+        "@elysiajs/swagger": "^1.2.0",
+        "@types/jsonwebtoken": "^9.0.5",
+        "@types/node": "^20.11.24",
+        "@types/sanitize-html": "^2.9.5",
+        "@types/ws": "^8.5.10",
+        "dotenv": "^16.4.5",
+        "elysia": "^1.2.11",
+        "helmet": "^7.1.0",
+        "jsonwebtoken": "^9.0.2",
+        "node-fetch": "^3.3.2",
+        "sanitize-html": "^2.11.0",
+        "typescript": "^5.3.3",
+        "winston": "^3.11.0",
+        "winston-daily-rotate-file": "^5.0.0",
+        "ws": "^8.16.0",
+        "zod": "^3.22.4",
+      },
+      "devDependencies": {
+        "@types/uuid": "^10.0.0",
+        "@typescript-eslint/eslint-plugin": "^7.1.0",
+        "@typescript-eslint/parser": "^7.1.0",
+        "bun-types": "^1.2.2",
+        "eslint": "^8.57.0",
+        "eslint-config-prettier": "^9.1.0",
+        "eslint-plugin-prettier": "^5.1.3",
+        "husky": "^9.0.11",
+        "prettier": "^3.2.5",
+        "supertest": "^6.3.3",
+        "uuid": "^11.0.5",
+      },
+    },
+  },
+  "packages": {
|
"@colors/colors": ["@colors/colors@1.6.0", "", {}, "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA=="],
|
||||||
|
|
||||||
|
"@dabh/diagnostics": ["@dabh/diagnostics@2.0.3", "", { "dependencies": { "colorspace": "1.1.x", "enabled": "2.0.x", "kuler": "^2.0.0" } }, "sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA=="],
|
||||||
|
|
||||||
|
"@elysiajs/cors": ["@elysiajs/cors@1.2.0", "", { "peerDependencies": { "elysia": ">= 1.2.0" } }, "sha512-qsJwDAg6WfdQRMfj6uSMcDPSpXvm/zQFeAX1uuJXhIgazH8itSfcDxcH9pMuXVRX1yQNi2pPwNQLJmAcw5mzvw=="],
|
||||||
|
|
||||||
|
"@elysiajs/swagger": ["@elysiajs/swagger@1.2.0", "", { "dependencies": { "@scalar/themes": "^0.9.52", "@scalar/types": "^0.0.12", "openapi-types": "^12.1.3", "pathe": "^1.1.2" }, "peerDependencies": { "elysia": ">= 1.2.0" } }, "sha512-OPx93DP6rM2VHjA3D44Xiz5MYm9AYlO2NGWPsnSsdyvaOCiL9wJj529583h7arX4iIEYE5LiLB0/A45unqbopw=="],
|
||||||
|
|
||||||
|
"@eslint-community/eslint-utils": ["@eslint-community/eslint-utils@4.4.1", "", { "dependencies": { "eslint-visitor-keys": "^3.4.3" }, "peerDependencies": { "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" } }, "sha512-s3O3waFUrMV8P/XaF/+ZTp1X9XBZW1a4B97ZnjQF2KYWaFD2A8KyFBsrsfSjEmjn3RGWAIuvlneuZm3CUK3jbA=="],
|
||||||
|
|
||||||
|
"@eslint-community/regexpp": ["@eslint-community/regexpp@4.12.1", "", {}, "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ=="],
|
||||||
|
|
||||||
|
"@eslint/eslintrc": ["@eslint/eslintrc@2.1.4", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ=="],
|
||||||
|
|
||||||
|
"@eslint/js": ["@eslint/js@8.57.1", "", {}, "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q=="],
|
||||||
|
|
||||||
|
"@humanwhocodes/config-array": ["@humanwhocodes/config-array@0.13.0", "", { "dependencies": { "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" } }, "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw=="],
|
||||||
|
|
||||||
|
"@humanwhocodes/module-importer": ["@humanwhocodes/module-importer@1.0.1", "", {}, "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="],
|
||||||
|
|
||||||
|
"@humanwhocodes/object-schema": ["@humanwhocodes/object-schema@2.0.3", "", {}, "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA=="],
|
||||||
|
|
||||||
|
"@nodelib/fs.scandir": ["@nodelib/fs.scandir@2.1.5", "", { "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" } }, "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="],
|
||||||
|
|
||||||
|
"@nodelib/fs.stat": ["@nodelib/fs.stat@2.0.5", "", {}, "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="],
|
||||||
|
|
||||||
|
"@nodelib/fs.walk": ["@nodelib/fs.walk@1.2.8", "", { "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" } }, "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="],
|
||||||
|
|
||||||
|
"@pkgr/core": ["@pkgr/core@0.1.1", "", {}, "sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA=="],
|
||||||
|
|
||||||
|
"@scalar/openapi-types": ["@scalar/openapi-types@0.1.1", "", {}, "sha512-NMy3QNk6ytcCoPUGJH0t4NNr36OWXgZhA3ormr3TvhX1NDgoF95wFyodGVH8xiHeUyn2/FxtETm8UBLbB5xEmg=="],
|
||||||
|
|
||||||
|
"@scalar/themes": ["@scalar/themes@0.9.64", "", { "dependencies": { "@scalar/types": "0.0.30" } }, "sha512-hr9bCTdH9M/N8w31Td+IJVtbH+v0Ej31myW8QWhUfwYZe5qS815Tl1mp+qWFaObstOw5VX3zOtiZuuhF1zMIyw=="],
|
||||||
|
|
||||||
|
"@scalar/types": ["@scalar/types@0.0.12", "", { "dependencies": { "@scalar/openapi-types": "0.1.1", "@unhead/schema": "^1.9.5" } }, "sha512-XYZ36lSEx87i4gDqopQlGCOkdIITHHEvgkuJFrXFATQs9zHARop0PN0g4RZYWj+ZpCUclOcaOjbCt8JGe22mnQ=="],
|
||||||
|
|
||||||
|
"@sinclair/typebox": ["@sinclair/typebox@0.34.15", "", {}, "sha512-xeIzl3h1Znn9w/LTITqpiwag0gXjA+ldi2ZkXIBxGEppGCW211Tza+eL6D4pKqs10bj5z2umBWk5WL6spQ2OCQ=="],
|
||||||
|
|
||||||
|
"@types/jsonwebtoken": ["@types/jsonwebtoken@9.0.8", "", { "dependencies": { "@types/ms": "*", "@types/node": "*" } }, "sha512-7fx54m60nLFUVYlxAB1xpe9CBWX2vSrk50Y6ogRJ1v5xxtba7qXTg5BgYDN5dq+yuQQ9HaVlHJyAAt1/mxryFg=="],
|
||||||
|
|
||||||
|
"@types/ms": ["@types/ms@2.1.0", "", {}, "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA=="],
|
||||||
|
|
||||||
|
"@types/node": ["@types/node@20.17.17", "", { "dependencies": { "undici-types": "~6.19.2" } }, "sha512-/WndGO4kIfMicEQLTi/mDANUu/iVUhT7KboZPdEqqHQ4aTS+3qT3U5gIqWDFV+XouorjfgGqvKILJeHhuQgFYg=="],
|
||||||
|
|
||||||
|
"@types/sanitize-html": ["@types/sanitize-html@2.13.0", "", { "dependencies": { "htmlparser2": "^8.0.0" } }, "sha512-X31WxbvW9TjIhZZNyNBZ/p5ax4ti7qsNDBDEnH4zAgmEh35YnFD1UiS6z9Cd34kKm0LslFW0KPmTQzu/oGtsqQ=="],
|
||||||
|
|
||||||
|
"@types/triple-beam": ["@types/triple-beam@1.3.5", "", {}, "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw=="],
|
||||||
|
|
||||||
|
"@types/uuid": ["@types/uuid@10.0.0", "", {}, "sha512-7gqG38EyHgyP1S+7+xomFtL+ZNHcKv6DwNaCZmJmo1vgMugyF3TCnXVg4t1uk89mLNwnLtnY3TpOpCOyp1/xHQ=="],
|
||||||
|
|
||||||
|
"@types/ws": ["@types/ws@8.5.14", "", { "dependencies": { "@types/node": "*" } }, "sha512-bd/YFLW+URhBzMXurx7lWByOu+xzU9+kb3RboOteXYDfW+tr+JZa99OyNmPINEGB/ahzKrEuc8rcv4gnpJmxTw=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/eslint-plugin": ["@typescript-eslint/eslint-plugin@7.18.0", "", { "dependencies": { "@eslint-community/regexpp": "^4.10.0", "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/type-utils": "7.18.0", "@typescript-eslint/utils": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "graphemer": "^1.4.0", "ignore": "^5.3.1", "natural-compare": "^1.4.0", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "@typescript-eslint/parser": "^7.0.0", "eslint": "^8.56.0" } }, "sha512-94EQTWZ40mzBc42ATNIBimBEDltSJ9RQHCC8vc/PDbxi4k8dVwUAv4o98dk50M1zB+JGFxp43FP7f8+FP8R6Sw=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/parser": ["@typescript-eslint/parser@7.18.0", "", { "dependencies": { "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", "@typescript-eslint/typescript-estree": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "debug": "^4.3.4" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-4Z+L8I2OqhZV8qA132M4wNL30ypZGYOQVBfMgxDH/K5UX0PNqTu1c6za9ST5r9+tavvHiTWmBnKzpCJ/GlVFtg=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/scope-manager": ["@typescript-eslint/scope-manager@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0" } }, "sha512-jjhdIE/FPF2B7Z1uzc6i3oWKbGcHb87Qw7AWj6jmEqNOfDFbJWtjt/XfwCpvNkpGWlcJaog5vTR+VV8+w9JflA=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/type-utils": ["@typescript-eslint/type-utils@7.18.0", "", { "dependencies": { "@typescript-eslint/typescript-estree": "7.18.0", "@typescript-eslint/utils": "7.18.0", "debug": "^4.3.4", "ts-api-utils": "^1.3.0" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-XL0FJXuCLaDuX2sYqZUUSOJ2sG5/i1AAze+axqmLnSkNEVMVYLF+cbwlB2w8D1tinFuSikHmFta+P+HOofrLeA=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/types": ["@typescript-eslint/types@7.18.0", "", {}, "sha512-iZqi+Ds1y4EDYUtlOOC+aUmxnE9xS/yCigkjA7XpTKV6nCBd3Hp/PRGGmdwnfkV2ThMyYldP1wRpm/id99spTQ=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/typescript-estree": ["@typescript-eslint/typescript-estree@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "@typescript-eslint/visitor-keys": "7.18.0", "debug": "^4.3.4", "globby": "^11.1.0", "is-glob": "^4.0.3", "minimatch": "^9.0.4", "semver": "^7.6.0", "ts-api-utils": "^1.3.0" } }, "sha512-aP1v/BSPnnyhMHts8cf1qQ6Q1IFwwRvAQGRvBFkWlo3/lH29OXA3Pts+c10nxRxIBrDnoMqzhgdwVe5f2D6OzA=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/utils": ["@typescript-eslint/utils@7.18.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", "@typescript-eslint/scope-manager": "7.18.0", "@typescript-eslint/types": "7.18.0", "@typescript-eslint/typescript-estree": "7.18.0" }, "peerDependencies": { "eslint": "^8.56.0" } }, "sha512-kK0/rNa2j74XuHVcoCZxdFBMF+aq/vH83CXAOHieC+2Gis4mF8jJXT5eAfyD3K0sAxtPuwxaIOIOvhwzVDt/kw=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/visitor-keys": ["@typescript-eslint/visitor-keys@7.18.0", "", { "dependencies": { "@typescript-eslint/types": "7.18.0", "eslint-visitor-keys": "^3.4.3" } }, "sha512-cDF0/Gf81QpY3xYyJKDV14Zwdmid5+uuENhjH2EqFaF0ni+yAyq/LzMaIJdhNJXZI7uLzwIlA+V7oWoyn6Curg=="],
|
||||||
|
|
||||||
|
"@ungap/structured-clone": ["@ungap/structured-clone@1.3.0", "", {}, "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g=="],
|
||||||
|
|
||||||
|
"@unhead/schema": ["@unhead/schema@1.11.18", "", { "dependencies": { "hookable": "^5.5.3", "zhead": "^2.2.4" } }, "sha512-a3TA/OJCRdfbFhcA3Hq24k1ZU1o9szicESrw8DZcGyQFacHnh84mVgnyqSkMnwgCmfN4kvjSiTBlLEHS6+wATw=="],
|
||||||
|
|
||||||
|
"acorn": ["acorn@8.14.0", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA=="],
|
||||||
|
|
||||||
|
"acorn-jsx": ["acorn-jsx@5.3.2", "", { "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } }, "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="],
|
||||||
|
|
||||||
|
"ajv": ["ajv@6.12.6", "", { "dependencies": { "fast-deep-equal": "^3.1.1", "fast-json-stable-stringify": "^2.0.0", "json-schema-traverse": "^0.4.1", "uri-js": "^4.2.2" } }, "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="],
|
||||||
|
|
||||||
|
"ansi-regex": ["ansi-regex@5.0.1", "", {}, "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="],
|
||||||
|
|
||||||
|
"ansi-styles": ["ansi-styles@4.3.0", "", { "dependencies": { "color-convert": "^2.0.1" } }, "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="],
|
||||||
|
|
||||||
|
"argparse": ["argparse@2.0.1", "", {}, "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="],
|
||||||
|
|
||||||
|
"array-union": ["array-union@2.1.0", "", {}, "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw=="],
|
||||||
|
|
||||||
|
"asap": ["asap@2.0.6", "", {}, "sha512-BSHWgDSAiKs50o2Re8ppvp3seVHXSRM44cdSsT9FfNEUUZLOGWVCsiWaRPWM1Znn+mqZ1OfVZ3z3DWEzSp7hRA=="],
|
||||||
|
|
||||||
|
"async": ["async@3.2.6", "", {}, "sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA=="],
|
||||||
|
|
||||||
|
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
|
||||||
|
|
||||||
|
"balanced-match": ["balanced-match@1.0.2", "", {}, "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="],
|
||||||
|
|
||||||
|
"brace-expansion": ["brace-expansion@1.1.11", "", { "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA=="],
|
||||||
|
|
||||||
|
"braces": ["braces@3.0.3", "", { "dependencies": { "fill-range": "^7.1.1" } }, "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="],
|
||||||
|
|
||||||
|
"buffer-equal-constant-time": ["buffer-equal-constant-time@1.0.1", "", {}, "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="],
|
||||||
|
|
||||||
|
"bun-types": ["bun-types@1.2.2", "", { "dependencies": { "@types/node": "*", "@types/ws": "~8.5.10" } }, "sha512-RCbMH5elr9gjgDGDhkTTugA21XtJAy/9jkKe/G3WR2q17VPGhcquf9Sir6uay9iW+7P/BV0CAHA1XlHXMAVKHg=="],
|
||||||
|
|
||||||
|
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.1", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g=="],
|
||||||
|
|
||||||
|
"call-bound": ["call-bound@1.0.3", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "get-intrinsic": "^1.2.6" } }, "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA=="],
|
||||||
|
|
||||||
|
"callsites": ["callsites@3.1.0", "", {}, "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="],
|
||||||
|
|
||||||
|
"chalk": ["chalk@4.1.2", "", { "dependencies": { "ansi-styles": "^4.1.0", "supports-color": "^7.1.0" } }, "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="],
|
||||||
|
|
||||||
|
"color": ["color@3.2.1", "", { "dependencies": { "color-convert": "^1.9.3", "color-string": "^1.6.0" } }, "sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA=="],
|
||||||
|
|
||||||
|
"color-convert": ["color-convert@2.0.1", "", { "dependencies": { "color-name": "~1.1.4" } }, "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="],
|
||||||
|
|
||||||
|
"color-name": ["color-name@1.1.4", "", {}, "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="],
|
||||||
|
|
||||||
|
"color-string": ["color-string@1.9.1", "", { "dependencies": { "color-name": "^1.0.0", "simple-swizzle": "^0.2.2" } }, "sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg=="],
|
||||||
|
|
||||||
|
"colorspace": ["colorspace@1.1.4", "", { "dependencies": { "color": "^3.1.3", "text-hex": "1.0.x" } }, "sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w=="],
|
||||||
|
|
||||||
|
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
|
||||||
|
|
||||||
|
"component-emitter": ["component-emitter@1.3.1", "", {}, "sha512-T0+barUSQRTUQASh8bx02dl+DhF54GtIDY13Y3m9oWTklKbb3Wv974meRpeZ3lp1JpLVECWWNHC4vaG2XHXouQ=="],
|
||||||
|
|
||||||
|
"concat-map": ["concat-map@0.0.1", "", {}, "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="],
|
||||||
|
|
||||||
|
"cookie": ["cookie@1.0.2", "", {}, "sha512-9Kr/j4O16ISv8zBBhJoi4bXOYNTkFLOqSL3UDB0njXxCXNezjeyVrJyGOWtgfs/q2km1gwBcfH8q1yEGoMYunA=="],
|
||||||
|
|
||||||
|
"cookiejar": ["cookiejar@2.1.4", "", {}, "sha512-LDx6oHrK+PhzLKJU9j5S7/Y3jM/mUHvD/DeI1WQmJn652iPC5Y4TBzC9l+5OMOXlyTTA+SmVUPm0HQUwpD5Jqw=="],
|
||||||
|
|
||||||
|
"cross-spawn": ["cross-spawn@7.0.6", "", { "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", "which": "^2.0.1" } }, "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="],
|
||||||
|
|
||||||
|
"data-uri-to-buffer": ["data-uri-to-buffer@4.0.1", "", {}, "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A=="],
|
||||||
|
|
||||||
|
"debug": ["debug@4.4.0", "", { "dependencies": { "ms": "^2.1.3" } }, "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA=="],
|
||||||
|
|
||||||
|
"deep-is": ["deep-is@0.1.4", "", {}, "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="],
|
||||||
|
|
||||||
|
"deepmerge": ["deepmerge@4.3.1", "", {}, "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A=="],
|
||||||
|
|
||||||
|
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
|
||||||
|
|
||||||
|
"dezalgo": ["dezalgo@1.0.4", "", { "dependencies": { "asap": "^2.0.0", "wrappy": "1" } }, "sha512-rXSP0bf+5n0Qonsb+SVVfNfIsimO4HEtmnIpPHY8Q1UCzKlQrDMfdobr8nJOOsRgWCyMRqeSBQzmWUMq7zvVig=="],
|
||||||
|
|
||||||
|
"dir-glob": ["dir-glob@3.0.1", "", { "dependencies": { "path-type": "^4.0.0" } }, "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA=="],
|
||||||
|
|
||||||
|
"doctrine": ["doctrine@3.0.0", "", { "dependencies": { "esutils": "^2.0.2" } }, "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w=="],
|
||||||
|
|
||||||
|
"dom-serializer": ["dom-serializer@2.0.0", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.2", "entities": "^4.2.0" } }, "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg=="],
|
||||||
|
|
||||||
|
"domelementtype": ["domelementtype@2.3.0", "", {}, "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw=="],
|
||||||
|
|
||||||
|
"domhandler": ["domhandler@5.0.3", "", { "dependencies": { "domelementtype": "^2.3.0" } }, "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w=="],
|
||||||
|
|
||||||
|
"domutils": ["domutils@3.2.2", "", { "dependencies": { "dom-serializer": "^2.0.0", "domelementtype": "^2.3.0", "domhandler": "^5.0.3" } }, "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw=="],
|
||||||
|
|
||||||
|
"dotenv": ["dotenv@16.4.7", "", {}, "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ=="],
|
||||||
|
|
||||||
|
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
|
||||||
|
|
||||||
|
"ecdsa-sig-formatter": ["ecdsa-sig-formatter@1.0.11", "", { "dependencies": { "safe-buffer": "^5.0.1" } }, "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ=="],
|
||||||
|
|
||||||
|
"elysia": ["elysia@1.2.12", "", { "dependencies": { "@sinclair/typebox": "^0.34.15", "cookie": "^1.0.2", "memoirist": "^0.3.0", "openapi-types": "^12.1.3" }, "peerDependencies": { "typescript": ">= 5.0.0" }, "optionalPeers": ["typescript"] }, "sha512-X1bZo09qe8/Poa/5tz08Y+sE/77B/wLwnA5xDDENU3FCrsUtYJuBVcy6BPXGRCgnJ1fPQpc0Ov2ZU5MYJXluTg=="],
|
||||||
|
|
||||||
|
"enabled": ["enabled@2.0.0", "", {}, "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ=="],
|
||||||
|
|
||||||
|
"entities": ["entities@4.5.0", "", {}, "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw=="],
|
||||||
|
|
||||||
|
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
|
||||||
|
|
||||||
|
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
|
||||||
|
|
||||||
|
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
|
||||||
|
|
||||||
|
"escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="],
|
||||||
|
|
||||||
|
"eslint": ["eslint@8.57.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", "@eslint/js": "8.57.1", "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.2", "debug": "^4.3.2", "doctrine": "^3.0.0", "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.2", "eslint-visitor-keys": "^3.4.3", "espree": "^9.6.1", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "globals": "^13.19.0", "graphemer": "^1.4.0", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "is-path-inside": "^3.0.3", "js-yaml": "^4.1.0", "json-stable-stringify-without-jsonify": "^1.0.1", "levn": "^0.4.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "text-table": "^0.2.0" }, "bin": { "eslint": "bin/eslint.js" } }, "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA=="],
|
||||||
|
|
||||||
|
"eslint-config-prettier": ["eslint-config-prettier@9.1.0", "", { "peerDependencies": { "eslint": ">=7.0.0" }, "bin": { "eslint-config-prettier": "bin/cli.js" } }, "sha512-NSWl5BFQWEPi1j4TjVNItzYV7dZXZ+wP6I6ZhrBGpChQhZRUaElihE9uRRkcbRnNb76UMKDF3r+WTmNcGPKsqw=="],
|
||||||
|
|
||||||
|
"eslint-plugin-prettier": ["eslint-plugin-prettier@5.2.3", "", { "dependencies": { "prettier-linter-helpers": "^1.0.0", "synckit": "^0.9.1" }, "peerDependencies": { "@types/eslint": ">=8.0.0", "eslint": ">=8.0.0", "eslint-config-prettier": "*", "prettier": ">=3.0.0" }, "optionalPeers": ["@types/eslint", "eslint-config-prettier"] }, "sha512-qJ+y0FfCp/mQYQ/vWQ3s7eUlFEL4PyKfAJxsnYTJ4YT73nsJBWqmEpFryxV9OeUiqmsTsYJ5Y+KDNaeP31wrRw=="],
|
||||||
|
|
||||||
|
"eslint-scope": ["eslint-scope@7.2.2", "", { "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" } }, "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg=="],
|
||||||
|
|
||||||
|
"eslint-visitor-keys": ["eslint-visitor-keys@3.4.3", "", {}, "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="],
|
||||||
|
|
||||||
|
"espree": ["espree@9.6.1", "", { "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" } }, "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ=="],
|
||||||
|
|
||||||
|
"esquery": ["esquery@1.6.0", "", { "dependencies": { "estraverse": "^5.1.0" } }, "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="],
|
||||||
|
|
||||||
|
"esrecurse": ["esrecurse@4.3.0", "", { "dependencies": { "estraverse": "^5.2.0" } }, "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="],
|
||||||
|
|
||||||
|
"estraverse": ["estraverse@5.3.0", "", {}, "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="],
|
||||||
|
|
||||||
|
"esutils": ["esutils@2.0.3", "", {}, "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="],
|
||||||
|
|
||||||
|
"fast-deep-equal": ["fast-deep-equal@3.1.3", "", {}, "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="],
|
||||||
|
|
||||||
|
"fast-diff": ["fast-diff@1.3.0", "", {}, "sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw=="],
|
||||||
|
|
||||||
|
"fast-glob": ["fast-glob@3.3.3", "", { "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", "glob-parent": "^5.1.2", "merge2": "^1.3.0", "micromatch": "^4.0.8" } }, "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="],
|
||||||
|
|
||||||
|
"fast-json-stable-stringify": ["fast-json-stable-stringify@2.1.0", "", {}, "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="],
|
||||||
|
|
||||||
|
"fast-levenshtein": ["fast-levenshtein@2.0.6", "", {}, "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="],
|
||||||
|
|
||||||
|
"fast-safe-stringify": ["fast-safe-stringify@2.1.1", "", {}, "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA=="],
|
||||||
|
|
||||||
|
"fastq": ["fastq@1.19.0", "", { "dependencies": { "reusify": "^1.0.4" } }, "sha512-7SFSRCNjBQIZH/xZR3iy5iQYR8aGBE0h3VG6/cwlbrpdciNYBMotQav8c1XI3HjHH+NikUpP53nPdlZSdWmFzA=="],
|
||||||
|
|
||||||
|
"fecha": ["fecha@4.2.3", "", {}, "sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw=="],
|
||||||
|
|
||||||
|
"fetch-blob": ["fetch-blob@3.2.0", "", { "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" } }, "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ=="],
|
||||||
|
|
||||||
|
"file-entry-cache": ["file-entry-cache@6.0.1", "", { "dependencies": { "flat-cache": "^3.0.4" } }, "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg=="],
|
||||||
|
|
||||||
|
"file-stream-rotator": ["file-stream-rotator@0.6.1", "", { "dependencies": { "moment": "^2.29.1" } }, "sha512-u+dBid4PvZw17PmDeRcNOtCP9CCK/9lRN2w+r1xIS7yOL9JFrIBKTvrYsxT4P0pGtThYTn++QS5ChHaUov3+zQ=="],
|
||||||
|
|
||||||
|
"fill-range": ["fill-range@7.1.1", "", { "dependencies": { "to-regex-range": "^5.0.1" } }, "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="],
|
||||||
|
|
||||||
|
"find-up": ["find-up@5.0.0", "", { "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" } }, "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="],
|
||||||
|
|
||||||
|
"flat-cache": ["flat-cache@3.2.0", "", { "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", "rimraf": "^3.0.2" } }, "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw=="],
|
||||||
|
|
||||||
|
"flatted": ["flatted@3.3.2", "", {}, "sha512-AiwGJM8YcNOaobumgtng+6NHuOqC3A7MixFeDafM3X9cIUM+xUXoS5Vfgf+OihAYe20fxqNM9yPBXJzRtZ/4eA=="],
|
||||||
|
|
||||||
|
"fn.name": ["fn.name@1.1.0", "", {}, "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw=="],
|
||||||
|
|
||||||
|
"form-data": ["form-data@4.0.1", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "mime-types": "^2.1.12" } }, "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw=="],
|
||||||
|
|
||||||
|
"formdata-polyfill": ["formdata-polyfill@4.0.10", "", { "dependencies": { "fetch-blob": "^3.1.2" } }, "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g=="],
|
||||||
|
|
||||||
|
"formidable": ["formidable@2.1.2", "", { "dependencies": { "dezalgo": "^1.0.4", "hexoid": "^1.0.0", "once": "^1.4.0", "qs": "^6.11.0" } }, "sha512-CM3GuJ57US06mlpQ47YcunuUZ9jpm8Vx+P2CGt2j7HpgkKZO/DJYQ0Bobim8G6PFQmK5lOqOOdUXboU+h73A4g=="],
|
||||||
|
|
||||||
|
"fs.realpath": ["fs.realpath@1.0.0", "", {}, "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="],
|
||||||
|
|
||||||
|
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
|
||||||
|
|
||||||
|
"get-intrinsic": ["get-intrinsic@1.2.7", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.0.0", "function-bind": "^1.1.2", "get-proto": "^1.0.0", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-VW6Pxhsrk0KAOqs3WEd0klDiF/+V7gQOpAvY1jVU/LHmaD/kQO4523aiJuikX/QAKYiW6x8Jh+RJej1almdtCA=="],
|
||||||
|
|
||||||
|
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
|
||||||
|
|
||||||
|
"glob": ["glob@7.2.3", "", { "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="],
|
||||||
|
|
||||||
|
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
|
||||||
|
|
||||||
|
"globals": ["globals@13.24.0", "", { "dependencies": { "type-fest": "^0.20.2" } }, "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ=="],
|
||||||
|
|
||||||
|
"globby": ["globby@11.1.0", "", { "dependencies": { "array-union": "^2.1.0", "dir-glob": "^3.0.1", "fast-glob": "^3.2.9", "ignore": "^5.2.0", "merge2": "^1.4.1", "slash": "^3.0.0" } }, "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g=="],
|
||||||
|
|
||||||
|
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
|
||||||
|
|
||||||
|
"graphemer": ["graphemer@1.4.0", "", {}, "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="],
|
||||||
|
|
||||||
|
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
|
||||||
|
|
||||||
|
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
|
||||||
|
|
||||||
|
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
|
||||||
|
|
||||||
|
"helmet": ["helmet@7.2.0", "", {}, "sha512-ZRiwvN089JfMXokizgqEPXsl2Guk094yExfoDXR0cBYWxtBbaSww/w+vT4WEJsBW2iTUi1GgZ6swmoug3Oy4Xw=="],
|
||||||
|
|
||||||
|
"hexoid": ["hexoid@1.0.0", "", {}, "sha512-QFLV0taWQOZtvIRIAdBChesmogZrtuXvVWsFHZTk2SU+anspqZ2vMnoLg7IE1+Uk16N19APic1BuF8bC8c2m5g=="],
|
||||||
|
|
||||||
|
"hookable": ["hookable@5.5.3", "", {}, "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ=="],
|
||||||
|
|
||||||
|
"htmlparser2": ["htmlparser2@8.0.2", "", { "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", "domutils": "^3.0.1", "entities": "^4.4.0" } }, "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA=="],
|
||||||
|
|
||||||
|
"husky": ["husky@9.1.7", "", { "bin": { "husky": "bin.js" } }, "sha512-5gs5ytaNjBrh5Ow3zrvdUUY+0VxIuWVL4i9irt6friV+BqdCfmV11CQTWMiBYWHbXhco+J1kHfTOUkePhCDvMA=="],
|
||||||
|
|
||||||
|
"ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
|
||||||
|
|
||||||
|
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],
|
||||||
|
|
||||||
|
"imurmurhash": ["imurmurhash@0.1.4", "", {}, "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="],
|
||||||
|
|
||||||
|
"inflight": ["inflight@1.0.6", "", { "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="],
|
||||||
|
|
||||||
|
"inherits": ["inherits@2.0.4", "", {}, "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="],
|
||||||
|
|
||||||
|
"is-arrayish": ["is-arrayish@0.3.2", "", {}, "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ=="],
|
||||||
|
|
||||||
|
"is-extglob": ["is-extglob@2.1.1", "", {}, "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="],
|
||||||
|
|
||||||
|
"is-glob": ["is-glob@4.0.3", "", { "dependencies": { "is-extglob": "^2.1.1" } }, "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="],
|
||||||
|
|
||||||
|
"is-number": ["is-number@7.0.0", "", {}, "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="],
|
||||||
|
|
||||||
|
"is-path-inside": ["is-path-inside@3.0.3", "", {}, "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="],
|
||||||
|
|
||||||
|
"is-plain-object": ["is-plain-object@5.0.0", "", {}, "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q=="],
|
||||||
|
|
||||||
|
"is-stream": ["is-stream@2.0.1", "", {}, "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg=="],
|
||||||
|
|
||||||
|
"isexe": ["isexe@2.0.0", "", {}, "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="],
|
||||||
|
|
||||||
|
"js-yaml": ["js-yaml@4.1.0", "", { "dependencies": { "argparse": "^2.0.1" }, "bin": { "js-yaml": "bin/js-yaml.js" } }, "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="],
|
||||||
|
|
||||||
|
"json-buffer": ["json-buffer@3.0.1", "", {}, "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="],
|
||||||
|
|
||||||
|
"json-schema-traverse": ["json-schema-traverse@0.4.1", "", {}, "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="],
|
||||||
|
|
||||||
|
"json-stable-stringify-without-jsonify": ["json-stable-stringify-without-jsonify@1.0.1", "", {}, "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="],
|
||||||
|
|
||||||
|
"jsonwebtoken": ["jsonwebtoken@9.0.2", "", { "dependencies": { "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", "lodash.isnumber": "^3.0.3", "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", "ms": "^2.1.1", "semver": "^7.5.4" } }, "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ=="],
|
||||||
|
|
||||||
|
"jwa": ["jwa@1.4.1", "", { "dependencies": { "buffer-equal-constant-time": "1.0.1", "ecdsa-sig-formatter": "1.0.11", "safe-buffer": "^5.0.1" } }, "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA=="],
|
||||||
|
|
||||||
|
"jws": ["jws@3.2.2", "", { "dependencies": { "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } }, "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA=="],
|
||||||
|
|
||||||
|
"keyv": ["keyv@4.5.4", "", { "dependencies": { "json-buffer": "3.0.1" } }, "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="],
|
||||||
|
|
||||||
|
"kuler": ["kuler@2.0.0", "", {}, "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A=="],
|
||||||
|
|
||||||
|
"levn": ["levn@0.4.1", "", { "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" } }, "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="],
|
||||||
|
|
||||||
|
"locate-path": ["locate-path@6.0.0", "", { "dependencies": { "p-locate": "^5.0.0" } }, "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="],
|
||||||
|
|
||||||
|
"lodash.includes": ["lodash.includes@4.3.0", "", {}, "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w=="],
|
||||||
|
|
||||||
|
"lodash.isboolean": ["lodash.isboolean@3.0.3", "", {}, "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg=="],
|
||||||
|
|
||||||
|
"lodash.isinteger": ["lodash.isinteger@4.0.4", "", {}, "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA=="],
|
||||||
|
|
||||||
|
"lodash.isnumber": ["lodash.isnumber@3.0.3", "", {}, "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw=="],
|
||||||
|
|
||||||
|
"lodash.isplainobject": ["lodash.isplainobject@4.0.6", "", {}, "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA=="],
|
||||||
|
|
||||||
|
"lodash.isstring": ["lodash.isstring@4.0.1", "", {}, "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw=="],
|
||||||
|
|
||||||
|
"lodash.merge": ["lodash.merge@4.6.2", "", {}, "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="],
|
||||||
|
|
||||||
|
"lodash.once": ["lodash.once@4.1.1", "", {}, "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg=="],
|
||||||
|
|
||||||
|
"logform": ["logform@2.7.0", "", { "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", "fecha": "^4.2.0", "ms": "^2.1.1", "safe-stable-stringify": "^2.3.1", "triple-beam": "^1.3.0" } }, "sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ=="],
|
||||||
|
|
||||||
|
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
|
||||||
|
|
||||||
|
"memoirist": ["memoirist@0.3.0", "", {}, "sha512-wR+4chMgVPq+T6OOsk40u9Wlpw1Pjx66NMNiYxCQQ4EUJ7jDs3D9kTCeKdBOkvAiqXlHLVJlvYL01PvIJ1MPNg=="],
|
||||||
|
|
||||||
|
"merge2": ["merge2@1.4.1", "", {}, "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="],
|
||||||
|
|
||||||
|
"methods": ["methods@1.1.2", "", {}, "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w=="],
|
||||||
|
|
||||||
|
"micromatch": ["micromatch@4.0.8", "", { "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" } }, "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="],
|
||||||
|
|
||||||
|
"mime": ["mime@2.6.0", "", { "bin": { "mime": "cli.js" } }, "sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg=="],
|
||||||
|
|
||||||
|
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
|
||||||
|
|
||||||
|
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
|
||||||
|
|
||||||
|
"minimatch": ["minimatch@3.1.2", "", { "dependencies": { "brace-expansion": "^1.1.7" } }, "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="],
|
||||||
|
|
||||||
|
"moment": ["moment@2.30.1", "", {}, "sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how=="],
|
||||||
|
|
||||||
|
"ms": ["ms@2.1.3", "", {}, "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="],
|
||||||
|
|
||||||
|
"nanoid": ["nanoid@3.3.8", "", { "bin": { "nanoid": "bin/nanoid.cjs" } }, "sha512-WNLf5Sd8oZxOm+TzppcYk8gVOgP+l58xNy58D0nbUnOxOWRWvlcCV4kUF7ltmI6PsrLl/BgKEyS4mqsGChFN0w=="],
|
||||||
|
|
||||||
|
"natural-compare": ["natural-compare@1.4.0", "", {}, "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="],
|
||||||
|
|
||||||
|
"node-domexception": ["node-domexception@1.0.0", "", {}, "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ=="],
|
||||||
|
|
||||||
|
"node-fetch": ["node-fetch@3.3.2", "", { "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", "formdata-polyfill": "^4.0.10" } }, "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA=="],
|
||||||
|
|
||||||
|
"object-hash": ["object-hash@3.0.0", "", {}, "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw=="],
|
||||||
|
|
||||||
|
"object-inspect": ["object-inspect@1.13.3", "", {}, "sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA=="],
|
||||||
|
|
||||||
|
"once": ["once@1.4.0", "", { "dependencies": { "wrappy": "1" } }, "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="],
|
||||||
|
|
||||||
|
"one-time": ["one-time@1.0.0", "", { "dependencies": { "fn.name": "1.x.x" } }, "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g=="],
|
||||||
|
|
||||||
|
"openapi-types": ["openapi-types@12.1.3", "", {}, "sha512-N4YtSYJqghVu4iek2ZUvcN/0aqH1kRDuNqzcycDxhOUpg7GdvLa2F3DgS6yBNhInhv2r/6I0Flkn7CqL8+nIcw=="],
|
||||||
|
|
||||||
|
"optionator": ["optionator@0.9.4", "", { "dependencies": { "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", "type-check": "^0.4.0", "word-wrap": "^1.2.5" } }, "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="],
|
||||||
|
|
||||||
|
"p-limit": ["p-limit@3.1.0", "", { "dependencies": { "yocto-queue": "^0.1.0" } }, "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="],
|
||||||
|
|
||||||
|
"p-locate": ["p-locate@5.0.0", "", { "dependencies": { "p-limit": "^3.0.2" } }, "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="],
|
||||||
|
|
||||||
|
"parent-module": ["parent-module@1.0.1", "", { "dependencies": { "callsites": "^3.0.0" } }, "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="],
|
||||||
|
|
||||||
|
"parse-srcset": ["parse-srcset@1.0.2", "", {}, "sha512-/2qh0lav6CmI15FzA3i/2Bzk2zCgQhGMkvhOhKNcBVQ1ldgpbfiNTVslmooUmWJcADi1f1kIeynbDRVzNlfR6Q=="],
|
||||||
|
|
||||||
|
"path-exists": ["path-exists@4.0.0", "", {}, "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="],
|
||||||
|
|
||||||
|
"path-is-absolute": ["path-is-absolute@1.0.1", "", {}, "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="],
|
||||||
|
|
||||||
|
"path-key": ["path-key@3.1.1", "", {}, "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="],
|
||||||
|
|
||||||
|
"path-type": ["path-type@4.0.0", "", {}, "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw=="],
|
||||||
|
|
||||||
|
"pathe": ["pathe@1.1.2", "", {}, "sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ=="],
|
||||||
|
|
||||||
|
"picocolors": ["picocolors@1.1.1", "", {}, "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="],
|
||||||
|
|
||||||
|
"picomatch": ["picomatch@2.3.1", "", {}, "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="],
|
||||||
|
|
||||||
|
"postcss": ["postcss@8.5.1", "", { "dependencies": { "nanoid": "^3.3.8", "picocolors": "^1.1.1", "source-map-js": "^1.2.1" } }, "sha512-6oz2beyjc5VMn/KV1pPw8fliQkhBXrVn1Z3TVyqZxU8kZpzEKhBdmCFqI6ZbmGtamQvQGuU1sgPTk8ZrXDD7jQ=="],
|
||||||
|
|
||||||
|
"prelude-ls": ["prelude-ls@1.2.1", "", {}, "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="],
|
||||||
|
|
||||||
|
"prettier": ["prettier@3.4.2", "", { "bin": { "prettier": "bin/prettier.cjs" } }, "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ=="],
|
||||||
|
|
||||||
|
"prettier-linter-helpers": ["prettier-linter-helpers@1.0.0", "", { "dependencies": { "fast-diff": "^1.1.2" } }, "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w=="],
|
||||||
|
|
||||||
|
"punycode": ["punycode@2.3.1", "", {}, "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="],
|
||||||
|
|
||||||
|
"qs": ["qs@6.14.0", "", { "dependencies": { "side-channel": "^1.1.0" } }, "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w=="],
|
||||||
|
|
||||||
|
"queue-microtask": ["queue-microtask@1.2.3", "", {}, "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="],
|
||||||
|
|
||||||
|
"readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
|
||||||
|
|
||||||
|
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="],
|
||||||
|
|
||||||
|
"reusify": ["reusify@1.0.4", "", {}, "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw=="],
|
||||||
|
|
||||||
|
"rimraf": ["rimraf@3.0.2", "", { "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" } }, "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA=="],
|
||||||
|
|
||||||
|
"run-parallel": ["run-parallel@1.2.0", "", { "dependencies": { "queue-microtask": "^1.2.2" } }, "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="],
|
||||||
|
|
||||||
|
"safe-buffer": ["safe-buffer@5.2.1", "", {}, "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ=="],
|
||||||
|
|
||||||
|
"safe-stable-stringify": ["safe-stable-stringify@2.5.0", "", {}, "sha512-b3rppTKm9T+PsVCBEOUR46GWI7fdOs00VKZ1+9c1EWDaDMvjQc6tUwuFyIprgGgTcWoVHSKrU8H31ZHA2e0RHA=="],
|
||||||
|
|
||||||
|
"sanitize-html": ["sanitize-html@2.14.0", "", { "dependencies": { "deepmerge": "^4.2.2", "escape-string-regexp": "^4.0.0", "htmlparser2": "^8.0.0", "is-plain-object": "^5.0.0", "parse-srcset": "^1.0.2", "postcss": "^8.3.11" } }, "sha512-CafX+IUPxZshXqqRaG9ZClSlfPVjSxI0td7n07hk8QO2oO+9JDnlcL8iM8TWeOXOIBFgIOx6zioTzM53AOMn3g=="],
|
||||||
|
|
||||||
|
"semver": ["semver@7.7.1", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-hlq8tAfn0m/61p4BVRcPzIGr6LKiMwo4VM6dGi6pt4qcRkmNzTcWq6eCEjEh+qXjkMDvPlOFFSGwQjoEa6gyMA=="],
|
||||||
|
|
||||||
|
"shebang-command": ["shebang-command@2.0.0", "", { "dependencies": { "shebang-regex": "^3.0.0" } }, "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="],
|
||||||
|
|
||||||
|
"shebang-regex": ["shebang-regex@3.0.0", "", {}, "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="],
|
||||||
|
|
||||||
|
"side-channel": ["side-channel@1.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3", "side-channel-list": "^1.0.0", "side-channel-map": "^1.0.1", "side-channel-weakmap": "^1.0.2" } }, "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw=="],
|
||||||
|
|
||||||
|
"side-channel-list": ["side-channel-list@1.0.0", "", { "dependencies": { "es-errors": "^1.3.0", "object-inspect": "^1.13.3" } }, "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA=="],
|
||||||
|
|
||||||
|
"side-channel-map": ["side-channel-map@1.0.1", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3" } }, "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA=="],
|
||||||
|
|
||||||
|
"side-channel-weakmap": ["side-channel-weakmap@1.0.2", "", { "dependencies": { "call-bound": "^1.0.2", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.5", "object-inspect": "^1.13.3", "side-channel-map": "^1.0.1" } }, "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A=="],
|
||||||
|
|
||||||
|
"simple-swizzle": ["simple-swizzle@0.2.2", "", { "dependencies": { "is-arrayish": "^0.3.1" } }, "sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg=="],
|
||||||
|
|
||||||
|
"slash": ["slash@3.0.0", "", {}, "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q=="],
|
||||||
|
|
||||||
|
"source-map-js": ["source-map-js@1.2.1", "", {}, "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA=="],
|
||||||
|
|
||||||
|
"stack-trace": ["stack-trace@0.0.10", "", {}, "sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg=="],
|
||||||
|
|
||||||
|
"string_decoder": ["string_decoder@1.3.0", "", { "dependencies": { "safe-buffer": "~5.2.0" } }, "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA=="],
|
||||||
|
|
||||||
|
"strip-ansi": ["strip-ansi@6.0.1", "", { "dependencies": { "ansi-regex": "^5.0.1" } }, "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="],
|
||||||
|
|
||||||
|
"strip-json-comments": ["strip-json-comments@3.1.1", "", {}, "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="],
|
||||||
|
|
||||||
|
"superagent": ["superagent@8.1.2", "", { "dependencies": { "component-emitter": "^1.3.0", "cookiejar": "^2.1.4", "debug": "^4.3.4", "fast-safe-stringify": "^2.1.1", "form-data": "^4.0.0", "formidable": "^2.1.2", "methods": "^1.1.2", "mime": "2.6.0", "qs": "^6.11.0", "semver": "^7.3.8" } }, "sha512-6WTxW1EB6yCxV5VFOIPQruWGHqc3yI7hEmZK6h+pyk69Lk/Ut7rLUY6W/ONF2MjBuGjvmMiIpsrVJ2vjrHlslA=="],
|
||||||
|
|
||||||
|
"supertest": ["supertest@6.3.4", "", { "dependencies": { "methods": "^1.1.2", "superagent": "^8.1.2" } }, "sha512-erY3HFDG0dPnhw4U+udPfrzXa4xhSG+n4rxfRuZWCUvjFWwKl+OxWf/7zk50s84/fAAs7vf5QAb9uRa0cCykxw=="],
|
||||||
|
|
||||||
|
"supports-color": ["supports-color@7.2.0", "", { "dependencies": { "has-flag": "^4.0.0" } }, "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="],
|
||||||
|
|
||||||
|
"synckit": ["synckit@0.9.2", "", { "dependencies": { "@pkgr/core": "^0.1.0", "tslib": "^2.6.2" } }, "sha512-vrozgXDQwYO72vHjUb/HnFbQx1exDjoKzqx23aXEg2a9VIg2TSFZ8FmeZpTjUCFMYw7mpX4BE2SFu8wI7asYsw=="],
|
||||||
|
|
||||||
|
"text-hex": ["text-hex@1.0.0", "", {}, "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg=="],
|
||||||
|
|
||||||
|
"text-table": ["text-table@0.2.0", "", {}, "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="],
|
||||||
|
|
||||||
|
"to-regex-range": ["to-regex-range@5.0.1", "", { "dependencies": { "is-number": "^7.0.0" } }, "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="],
|
||||||
|
|
||||||
|
"triple-beam": ["triple-beam@1.4.1", "", {}, "sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg=="],
|
||||||
|
|
||||||
|
"ts-api-utils": ["ts-api-utils@1.4.3", "", { "peerDependencies": { "typescript": ">=4.2.0" } }, "sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw=="],
|
||||||
|
|
||||||
|
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
|
||||||
|
|
||||||
|
"type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="],
|
||||||
|
|
||||||
|
"type-fest": ["type-fest@0.20.2", "", {}, "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ=="],
|
||||||
|
|
||||||
|
"typescript": ["typescript@5.7.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw=="],
|
||||||
|
|
||||||
|
"undici-types": ["undici-types@6.19.8", "", {}, "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw=="],
|
||||||
|
|
||||||
|
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||||
|
|
||||||
|
"util-deprecate": ["util-deprecate@1.0.2", "", {}, "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="],
|
||||||
|
|
||||||
|
"uuid": ["uuid@11.0.5", "", { "bin": { "uuid": "dist/esm/bin/uuid" } }, "sha512-508e6IcKLrhxKdBbcA2b4KQZlLVp2+J5UwQ6F7Drckkc5N9ZJwFa4TgWtsww9UG8fGHbm6gbV19TdM5pQ4GaIA=="],
|
||||||
|
|
||||||
|
"web-streams-polyfill": ["web-streams-polyfill@3.3.3", "", {}, "sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw=="],
|
||||||
|
|
||||||
|
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||||
|
|
||||||
|
"winston": ["winston@3.17.0", "", { "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", "logform": "^2.7.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", "stack-trace": "0.0.x", "triple-beam": "^1.3.0", "winston-transport": "^4.9.0" } }, "sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw=="],
|
||||||
|
|
||||||
|
"winston-daily-rotate-file": ["winston-daily-rotate-file@5.0.0", "", { "dependencies": { "file-stream-rotator": "^0.6.1", "object-hash": "^3.0.0", "triple-beam": "^1.4.1", "winston-transport": "^4.7.0" }, "peerDependencies": { "winston": "^3" } }, "sha512-JDjiXXkM5qvwY06733vf09I2wnMXpZEhxEVOSPenZMii+g7pcDcTBt2MRugnoi8BwVSuCT2jfRXBUy+n1Zz/Yw=="],
|
||||||
|
|
||||||
|
"winston-transport": ["winston-transport@4.9.0", "", { "dependencies": { "logform": "^2.7.0", "readable-stream": "^3.6.2", "triple-beam": "^1.3.0" } }, "sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A=="],
|
||||||
|
|
||||||
|
"word-wrap": ["word-wrap@1.2.5", "", {}, "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="],
|
||||||
|
|
||||||
|
"wrappy": ["wrappy@1.0.2", "", {}, "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="],
|
||||||
|
|
||||||
|
"ws": ["ws@8.18.0", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": ">=5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-8VbfWfHLbbwu3+N6OKsOMpBdT4kXPDDB9cJk2bJ6mh9ucxdlnNvH1e+roYkKmN9Nxw2yjz7VzeO9oOz2zJ04Pw=="],
|
||||||
|
|
||||||
|
"yocto-queue": ["yocto-queue@0.1.0", "", {}, "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="],
|
||||||
|
|
||||||
|
"zhead": ["zhead@2.2.4", "", {}, "sha512-8F0OI5dpWIA5IGG5NHUg9staDwz/ZPxZtvGVf01j7vHqSyZ0raHY+78atOVxRqb73AotX22uV1pXt3gYSstGag=="],
|
||||||
|
|
||||||
|
"zod": ["zod@3.24.1", "", {}, "sha512-muH7gBL9sI1nciMZV67X5fTKKBLtwpZ5VBp1vsOQzj1MhrBZ4wlVCm3gedKZWLp0Oyel8sIGfeiz54Su+OVT+A=="],
|
||||||
|
|
||||||
|
"@scalar/themes/@scalar/types": ["@scalar/types@0.0.30", "", { "dependencies": { "@scalar/openapi-types": "0.1.7", "@unhead/schema": "^1.11.11" } }, "sha512-rhgwovQb5f7PXuUB5bLUElpo90fdsiwcOgBXVWZ6n6dnFSKovNJ7GPXQimsZioMzTF6TdwfP94UpZVdZAK4aTw=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/typescript-estree/minimatch": ["minimatch@9.0.5", "", { "dependencies": { "brace-expansion": "^2.0.1" } }, "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="],
|
||||||
|
|
||||||
|
"color/color-convert": ["color-convert@1.9.3", "", { "dependencies": { "color-name": "1.1.3" } }, "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg=="],
|
||||||
|
|
||||||
|
"color-string/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||||
|
|
||||||
|
"fast-glob/glob-parent": ["glob-parent@5.1.2", "", { "dependencies": { "is-glob": "^4.0.1" } }, "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="],
|
||||||
|
|
||||||
|
"@scalar/themes/@scalar/types/@scalar/openapi-types": ["@scalar/openapi-types@0.1.7", "", {}, "sha512-oOTG3JQifg55U3DhKB7WdNIxFnJzbPJe7rqdyWdio977l8IkxQTVmObftJhdNIMvhV2K+1f/bDoMQGu6yTaD0A=="],
|
||||||
|
|
||||||
|
"@typescript-eslint/typescript-estree/minimatch/brace-expansion": ["brace-expansion@2.0.1", "", { "dependencies": { "balanced-match": "^1.0.0" } }, "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA=="],
|
||||||
|
|
||||||
|
"color/color-convert/color-name": ["color-name@1.1.3", "", {}, "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw=="],
|
||||||
|
}
|
||||||
|
}
|
||||||
64
docker-build.sh
Executable file
@@ -0,0 +1,64 @@
#!/bin/bash

# Enable error handling
set -euo pipefail

# Function to clean up on script exit
cleanup() {
    echo "Cleaning up..."
    docker builder prune -f --filter until=24h
    docker image prune -f
}
trap cleanup EXIT

# Clean up Docker system
echo "Cleaning up Docker system..."
docker system prune -f --volumes

# Set build arguments for better performance
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
export BUILDKIT_PROGRESS=plain

# Calculate available memory and CPU
TOTAL_MEM=$(free -m | awk '/^Mem:/{print $2}')
BUILD_MEM=$(( TOTAL_MEM / 2 )) # Use half of available memory
CPU_COUNT=$(nproc)
CPU_QUOTA=$(( CPU_COUNT * 50000 )) # Allow 50% CPU usage per core

echo "Building with ${BUILD_MEM}MB memory limit and CPU quota ${CPU_QUOTA}"

# Remove any existing lockfile
rm -f bun.lockb

# Build with resource limits, optimizations, and timeout
echo "Building Docker image..."
DOCKER_BUILDKIT=1 docker build \
    --memory="${BUILD_MEM}m" \
    --memory-swap="${BUILD_MEM}m" \
    --cpu-quota="${CPU_QUOTA}" \
    --build-arg BUILDKIT_INLINE_CACHE=1 \
    --build-arg DOCKER_BUILDKIT=1 \
    --build-arg NODE_ENV=production \
    --progress=plain \
    --no-cache \
    --compress \
    -t homeassistant-mcp:latest \
    -t homeassistant-mcp:$(date +%Y%m%d) \
    .

# Check if build was successful
BUILD_EXIT_CODE=$?
if [ $BUILD_EXIT_CODE -eq 124 ]; then
    echo "Build timed out after 15 minutes!"
    exit 1
elif [ $BUILD_EXIT_CODE -ne 0 ]; then
    echo "Build failed with exit code ${BUILD_EXIT_CODE}!"
    exit 1
else
    echo "Build completed successfully!"

    # Show image size and layers
    docker image ls homeassistant-mcp:latest --format "Image size: {{.Size}}"
    echo "Layer count: $(docker history homeassistant-mcp:latest | wc -l)"
fi
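The memory and CPU figures in `docker-build.sh` come from Docker's CFS scheduler: `--cpu-quota` is expressed in microseconds per 100 000 µs scheduling period, so a quota of 50 000 per detected core caps the build at roughly half of each core, and `BUILD_MEM` is simply half of the host's RAM. A minimal sketch of the same arithmetic (the 16 GB / 8-core inputs are made-up example values, not taken from the diff):

```python
# Sketch of the resource math in docker-build.sh (illustrative inputs).
CFS_PERIOD_US = 100_000  # Docker's default CFS scheduling period in microseconds


def build_limits(total_mem_mb: int, cpu_count: int) -> dict:
    """Mirror the BUILD_MEM / CPU_QUOTA calculation from the script."""
    build_mem = total_mem_mb // 2   # use half of the available memory
    cpu_quota = cpu_count * 50_000  # 50% of each core per CFS period
    return {
        "memory": f"{build_mem}m",
        "cpu_quota": cpu_quota,
        "effective_cores": cpu_quota / CFS_PERIOD_US,
    }


if __name__ == "__main__":
    # On a hypothetical 16 GB / 8-core builder this yields an 8000m memory cap
    # and a quota of 400000, i.e. about 4 cores' worth of CPU time.
    print(build_limits(total_mem_mb=16_000, cpu_count=8))
```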
68
docker/speech/Dockerfile
Normal file
@@ -0,0 +1,68 @@
# Use Python slim image as builder
FROM python:3.10-slim as builder

# Install build dependencies
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    portaudio19-dev \
    && rm -rf /var/lib/apt/lists/*

# Create and activate virtual environment
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Install Python dependencies with specific versions and CPU-only variants
RUN pip install --no-cache-dir "numpy>=1.24.3,<2.0.0" && \
    pip install --no-cache-dir torch==2.1.2 torchaudio==2.1.2 --index-url https://download.pytorch.org/whl/cpu && \
    pip install --no-cache-dir faster-whisper==0.10.0 openwakeword==0.4.0 pyaudio==0.2.14 sounddevice==0.4.6 requests==2.31.0 && \
    pip freeze > /opt/venv/requirements.txt

# Create final image
FROM python:3.10-slim

# Copy virtual environment from builder
COPY --from=builder /opt/venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Install audio dependencies
RUN apt-get update && apt-get install -y \
    portaudio19-dev \
    python3-pyaudio \
    alsa-utils \
    libasound2 \
    libasound2-plugins \
    pulseaudio \
    && rm -rf /var/lib/apt/lists/*

# Create necessary directories
RUN mkdir -p /models/wake_word /audio

# Set working directory
WORKDIR /app

# Copy the wake word detection script
COPY wake_word_detector.py .

# Set environment variables
ENV WHISPER_MODEL_PATH=/models \
    WAKEWORD_MODEL_PATH=/models/wake_word \
    PYTHONUNBUFFERED=1 \
    ASR_MODEL=base.en \
    ASR_MODEL_PATH=/models

# Add resource limits to Python
ENV PYTHONMALLOC=malloc \
    MALLOC_TRIM_THRESHOLD_=100000 \
    PYTHONDEVMODE=1

# Add healthcheck
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD ps aux | grep '[p]ython' || exit 1

# Copy audio setup script
COPY setup-audio.sh /setup-audio.sh
RUN chmod +x /setup-audio.sh

# Start command
CMD ["/bin/bash", "-c", "/setup-audio.sh && python -u wake_word_detector.py"]
16
docker/speech/setup-audio.sh
Executable file
@@ -0,0 +1,16 @@
#!/bin/bash

# Wait for PulseAudio to be ready
sleep 2

# Mute the monitor to prevent feedback
pactl set-source-mute alsa_output.pci-0000_00_1b.0.analog-stereo.monitor 1

# Set microphone sensitivity to 65%
pactl set-source-volume alsa_input.pci-0000_00_1b.0.analog-stereo 65%

# Set speaker volume to 40%
pactl set-sink-volume alsa_output.pci-0000_00_1b.0.analog-stereo 40%

# Make the script executable
chmod +x /setup-audio.sh
415
docker/speech/wake_word_detector.py
Normal file
@@ -0,0 +1,415 @@
import os
import json
import queue
import threading
import numpy as np
import sounddevice as sd
from openwakeword import Model
from datetime import datetime
import wave
from faster_whisper import WhisperModel
import requests
import logging
import time

# Set up logging
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)

# Configuration
SAMPLE_RATE = 16000
CHANNELS = 1
CHUNK_SIZE = 1024
BUFFER_DURATION = 10  # seconds to keep in buffer
DETECTION_THRESHOLD = 0.5
CONTINUOUS_TRANSCRIPTION_INTERVAL = 3  # seconds between transcriptions
MAX_MODEL_LOAD_RETRIES = 3
MODEL_LOAD_RETRY_DELAY = 5  # seconds
MODEL_DOWNLOAD_TIMEOUT = 600  # 10 minutes timeout for model download

# Audio processing parameters
NOISE_THRESHOLD = 0.08  # Increased threshold for better noise filtering
MIN_SPEECH_DURATION = 2.0  # Longer minimum duration to avoid fragments
SILENCE_DURATION = 1.0  # Longer silence duration
MAX_REPETITIONS = 1  # More aggressive repetition filtering
ECHO_THRESHOLD = 0.75  # More sensitive echo detection
MIN_SEGMENT_DURATION = 1.0  # Longer minimum segment duration
FEEDBACK_WINDOW = 5  # Window size for feedback detection in seconds

# Feature flags from environment
WAKE_WORD_ENABLED = os.environ.get('ENABLE_WAKE_WORD', 'false').lower() == 'true'
SPEECH_ENABLED = os.environ.get('ENABLE_SPEECH_FEATURES', 'true').lower() == 'true'

# Wake word models to use (only if wake word is enabled)
WAKE_WORDS = ["alexa"]  # Using 'alexa' as temporary replacement for 'gaja'
WAKE_WORD_ALIAS = "gaja"  # What we print when wake word is detected

# Home Assistant Configuration
HASS_HOST = os.environ.get('HASS_HOST', 'http://homeassistant.local:8123')
HASS_TOKEN = os.environ.get('HASS_TOKEN')


def initialize_asr_model():
    """Initialize the ASR model with retries and timeout"""
    model_path = os.environ.get('ASR_MODEL_PATH', '/models')
    model_name = os.environ.get('ASR_MODEL', 'large-v3')

    start_time = time.time()
    for attempt in range(MAX_MODEL_LOAD_RETRIES):
        try:
            if time.time() - start_time > MODEL_DOWNLOAD_TIMEOUT:
                logger.error("Model download timeout exceeded")
                raise TimeoutError("Model download took too long")

            logger.info(f"Loading ASR model (attempt {attempt + 1}/{MAX_MODEL_LOAD_RETRIES})")
            model = WhisperModel(
                model_size_or_path=model_name,
                device="cpu",
                compute_type="int8",
                download_root=model_path,
                num_workers=1  # Reduce concurrent downloads
            )
            logger.info("ASR model loaded successfully")
            return model
        except Exception as e:
            logger.error(f"Failed to load ASR model (attempt {attempt + 1}): {e}")
            if attempt < MAX_MODEL_LOAD_RETRIES - 1:
                logger.info(f"Retrying in {MODEL_LOAD_RETRY_DELAY} seconds...")
                time.sleep(MODEL_LOAD_RETRY_DELAY)
            else:
                logger.error("Failed to load ASR model after all retries")
                raise


# Initialize the ASR model with retries
try:
    asr_model = initialize_asr_model()
except Exception as e:
    logger.error(f"Critical error initializing ASR model: {e}")
    raise


def send_command_to_hass(domain, service, entity_id):
    """Send command to Home Assistant"""
    if not HASS_TOKEN:
        logger.error("Error: HASS_TOKEN not set")
        return False

    headers = {
        "Authorization": f"Bearer {HASS_TOKEN}",
        "Content-Type": "application/json",
    }

    url = f"{HASS_HOST}/api/services/{domain}/{service}"
    data = {"entity_id": entity_id}

    try:
        response = requests.post(url, headers=headers, json=data)
        response.raise_for_status()
        logger.info(f"Command sent: {domain}.{service} for {entity_id}")
        return True
    except Exception as e:
        logger.error(f"Error sending command to Home Assistant: {e}")
        return False


def is_speech(audio_data, threshold=NOISE_THRESHOLD):
    """Detect if audio segment contains speech based on amplitude and frequency content"""
    # Calculate RMS amplitude
    rms = np.sqrt(np.mean(np.square(audio_data)))

    # Calculate signal energy in speech frequency range (100-4000 Hz)
    fft = np.fft.fft(audio_data)
    freqs = np.fft.fftfreq(len(audio_data), 1/SAMPLE_RATE)
    speech_mask = (np.abs(freqs) >= 100) & (np.abs(freqs) <= 4000)
    speech_energy = np.sum(np.abs(fft[speech_mask])) / len(audio_data)

    # Enhanced echo detection
    # 1. Check for periodic patterns in the signal
    autocorr = np.correlate(audio_data, audio_data, mode='full')
    autocorr = autocorr[len(autocorr)//2:]  # Use only positive lags
    peaks = np.where(autocorr > ECHO_THRESHOLD * np.max(autocorr))[0]
    peak_spacing = np.diff(peaks)
    has_periodic_echo = len(peak_spacing) > 2 and np.std(peak_spacing) < 0.1 * np.mean(peak_spacing)

    # 2. Check for sudden amplitude changes
    amplitude_envelope = np.abs(audio_data)
    amplitude_changes = np.diff(amplitude_envelope)
    has_feedback_spikes = np.any(np.abs(amplitude_changes) > threshold * 2)

    # 3. Check frequency distribution
    freq_magnitudes = np.abs(fft)[:len(fft)//2]
    peak_freqs = freqs[:len(fft)//2][np.argsort(freq_magnitudes)[-3:]]
    has_feedback_freqs = np.any((peak_freqs > 2000) & (peak_freqs < 4000))

    # Combine all criteria
    is_valid_speech = (
        rms > threshold and
        speech_energy > threshold and
        not has_periodic_echo and
        not has_feedback_spikes and
        not has_feedback_freqs
    )

    return is_valid_speech


def process_command(text):
    """Process the transcribed command and execute appropriate action"""
    text = text.lower().strip()

    # Skip if text is too short or contains numbers (likely noise)
    if len(text) < 5 or any(char.isdigit() for char in text):
        logger.debug("Text too short or contains numbers, skipping")
        return

    # Enhanced noise pattern detection
    noise_patterns = ["lei", "los", "und", "aber", "nicht mehr", "das das", "und und"]
    for pattern in noise_patterns:
        if text.count(pattern) > 1:  # More aggressive pattern filtering
            logger.debug(f"Detected noise pattern '{pattern}', skipping")
            return

    # More aggressive repetition detection
    words = text.split()
    if len(words) >= 2:
        # Check for immediate word repetitions
        for i in range(len(words)-1):
            if words[i] == words[i+1]:
                logger.debug(f"Detected immediate word repetition: '{words[i]}', skipping")
                return

        # Check for phrase repetitions
        phrases = [' '.join(words[i:i+2]) for i in range(len(words)-1)]
        phrase_counts = {}
        for phrase in phrases:
            phrase_counts[phrase] = phrase_counts.get(phrase, 0) + 1
            if phrase_counts[phrase] > MAX_REPETITIONS:
                logger.debug(f"Skipping due to excessive repetition: '{phrase}'")
                return

    # German command mappings
    commands = {
        "ausschalten": "turn_off",
        "einschalten": "turn_on",
        "an": "turn_on",
        "aus": "turn_off"
    }

    rooms = {
        "wohnzimmer": "living_room",
        "küche": "kitchen",
        "schlafzimmer": "bedroom",
        "bad": "bathroom"
    }

    # Detect room
    detected_room = None
    for german_room, english_room in rooms.items():
        if german_room in text:
            detected_room = english_room
            break

    # Detect command
    detected_command = None
    for german_cmd, english_cmd in commands.items():
        if german_cmd in text:
            detected_command = english_cmd
            break

    if detected_room and detected_command:
        # Construct entity ID (assuming light)
        entity_id = f"light.{detected_room}"

        # Send command to Home Assistant
        if send_command_to_hass("light", detected_command, entity_id):
            logger.info(f"Executed: {detected_command} for {entity_id}")
        else:
            logger.error("Failed to execute command")
    else:
        logger.debug(f"No command found in text: '{text}'")


class AudioProcessor:
    def __init__(self):
        logger.info("Initializing AudioProcessor...")
        self.audio_buffer = queue.Queue()
        self.recording = False
        self.buffer = np.zeros(SAMPLE_RATE * BUFFER_DURATION)
        self.buffer_lock = threading.Lock()
        self.last_transcription_time = 0
        self.stream = None
        self.speech_detected = False
        self.silence_frames = 0
        self.speech_frames = 0

        # Initialize wake word detection only if enabled
        if WAKE_WORD_ENABLED:
            try:
                logger.info("Initializing wake word model...")
                self.wake_word_model = Model(vad_threshold=0.5)
                self.last_prediction = None
                logger.info("Wake word model initialized successfully")
            except Exception as e:
                logger.error(f"Failed to initialize wake word model: {e}")
                raise
        else:
            self.wake_word_model = None
            self.last_prediction = None
            logger.info("Wake word detection disabled")

    def should_transcribe(self):
        """Determine if we should transcribe based on mode and timing"""
        current_time = datetime.now().timestamp()
        if not WAKE_WORD_ENABLED:
            # Check if enough time has passed since last transcription
            time_since_last = current_time - self.last_transcription_time
            if time_since_last >= CONTINUOUS_TRANSCRIPTION_INTERVAL:
                # Only transcribe if we detect speech
                frames_per_chunk = CHUNK_SIZE
                min_speech_frames = int(MIN_SPEECH_DURATION * SAMPLE_RATE / frames_per_chunk)

                if self.speech_frames >= min_speech_frames:
                    self.last_transcription_time = current_time
                    self.speech_frames = 0  # Reset counter
                    return True
        return False

    def audio_callback(self, indata, frames, time, status):
        """Callback for audio input"""
        if status:
            logger.warning(f"Audio callback status: {status}")

        # Convert to mono if necessary
        if CHANNELS > 1:
            audio_data = np.mean(indata, axis=1)
        else:
            audio_data = indata.flatten()

        # Check for speech
        if is_speech(audio_data):
            self.speech_frames += 1
            self.silence_frames = 0
        else:
            self.silence_frames += 1
            frames_per_chunk = CHUNK_SIZE
            silence_frames_threshold = int(SILENCE_DURATION * SAMPLE_RATE / frames_per_chunk)

            if self.silence_frames >= silence_frames_threshold:
                self.speech_frames = 0

        # Update circular buffer
        with self.buffer_lock:
            self.buffer = np.roll(self.buffer, -len(audio_data))
            self.buffer[-len(audio_data):] = audio_data

        if WAKE_WORD_ENABLED:
            # Process for wake word detection
            self.last_prediction = self.wake_word_model.predict(audio_data)

            # Check if wake word detected
            for wake_word in WAKE_WORDS:
                confidence = self.last_prediction[wake_word]
                if confidence > DETECTION_THRESHOLD:
                    logger.info(
                        f"Wake word: {WAKE_WORD_ALIAS} (confidence: {confidence:.2f})"
                    )
                    self.process_audio()
                    break
        else:
            # Continuous transcription mode
            if self.should_transcribe():
                self.process_audio()

    def process_audio(self):
        """Process the current audio buffer (save and transcribe)"""
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"/audio/audio_segment_{timestamp}.wav"

        # Save the audio buffer to a WAV file
        with wave.open(filename, 'wb') as wf:
            wf.setnchannels(CHANNELS)
            wf.setsampwidth(2)  # 16-bit audio
            wf.setframerate(SAMPLE_RATE)

            # Convert float32 to int16
            audio_data = (self.buffer * 32767).astype(np.int16)
            wf.writeframes(audio_data.tobytes())

        logger.info(f"Saved audio segment to {filename}")

        # Transcribe the audio with German language preference
        try:
            segments, info = asr_model.transcribe(
                filename,
                language="de",  # Set German as preferred language
                beam_size=5,
                temperature=0
            )

            # Get the full transcribed text
            transcribed_text = " ".join(segment.text for segment in segments)
            logger.info(f"Transcribed text: {transcribed_text}")

            # Process the command
            process_command(transcribed_text)

        except Exception as e:
            logger.error(f"Error during transcription or processing: {e}")

    def start(self):
        """Start audio processing"""
        try:
            logger.info("Starting audio processor...")

            # Log configuration
            logger.debug(f"Sample Rate: {SAMPLE_RATE}")
            logger.debug(f"Channels: {CHANNELS}")
            logger.debug(f"Chunk Size: {CHUNK_SIZE}")
            logger.debug(f"Buffer Duration: {BUFFER_DURATION}")
            logger.debug(f"Wake Word Enabled: {WAKE_WORD_ENABLED}")
            logger.debug(f"Speech Enabled: {SPEECH_ENABLED}")
            logger.debug(f"ASR Model: {os.environ.get('ASR_MODEL')}")

            if WAKE_WORD_ENABLED:
                logger.info("Initializing wake word detection...")
                logger.info(f"Loaded wake words: {', '.join(WAKE_WORDS)}")
            else:
                logger.info("Starting continuous transcription mode...")
                interval = CONTINUOUS_TRANSCRIPTION_INTERVAL
                logger.info(f"Will transcribe every {interval} seconds")

            try:
                logger.debug("Setting up audio input stream...")
                with sd.InputStream(
                    channels=CHANNELS,
                    samplerate=SAMPLE_RATE,
                    blocksize=CHUNK_SIZE,
                    callback=self.audio_callback
                ):
                    logger.info("Audio input stream started successfully")
                    logger.info("Listening for audio input...")
                    logger.info("Press Ctrl+C to stop")

                    while True:
                        sd.sleep(1000)  # Sleep for 1 second

            except sd.PortAudioError as e:
                logger.error(f"Error setting up audio stream: {e}")
                logger.error("Check if microphone is connected and accessible")
                raise
            except Exception as e:
                logger.error(f"Unexpected error in audio stream: {e}")
                raise

        except KeyboardInterrupt:
            logger.info("\nStopping audio processing...")
        except Exception as e:
            logger.error("Critical error in audio processing", exc_info=True)
            raise


if __name__ == "__main__":
    try:
        logger.info("Initializing AudioProcessor...")
        processor = AudioProcessor()
        processor.start()
    except Exception as e:
        logger.error("Failed to start AudioProcessor", exc_info=True)
        raise
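Taken together, the detector's command path is: transcribe a buffered segment, look the German words up in the `rooms` and `commands` tables, build a `light.<room>` entity id, and POST it to Home Assistant's `/api/services/light/<service>` endpoint via `send_command_to_hass`. A minimal sketch of just the resolution step, reusing the same dictionaries (the sample phrase and the printed result are illustrative, not output captured from the detector):

```python
# Illustrative walk-through of the room/command resolution in process_command.
commands = {"ausschalten": "turn_off", "einschalten": "turn_on", "an": "turn_on", "aus": "turn_off"}
rooms = {"wohnzimmer": "living_room", "küche": "kitchen", "schlafzimmer": "bedroom", "bad": "bathroom"}


def resolve(text: str):
    """Return (domain, service, entity_id) for a transcribed phrase, or None."""
    text = text.lower().strip()
    room = next((en for de, en in rooms.items() if de in text), None)
    service = next((en for de, en in commands.items() if de in text), None)
    if room and service:
        return "light", service, f"light.{room}"  # same entity scheme as the detector
    return None


print(resolve("licht im wohnzimmer einschalten"))
# -> ('light', 'turn_on', 'light.living_room')
```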
10
docs/Gemfile
@@ -4,6 +4,9 @@ gem "github-pages", group: :jekyll_plugins
gem "jekyll-theme-minimal"
gem "jekyll-relative-links"
gem "jekyll-seo-tag"
gem "jekyll-remote-theme"
gem "jekyll-github-metadata"
gem "faraday-retry"

# Windows and JRuby does not include zoneinfo files, so bundle the tzinfo-data gem
# and associated library.
@@ -11,3 +14,10 @@ platforms :mingw, :x64_mingw, :mswin, :jruby do
gem "tzinfo", ">= 1"
gem "tzinfo-data"
end

# Lock `http_parser.rb` gem to `v0.6.x` on JRuby builds since newer versions of the gem
# do not have a Java counterpart.
gem "http_parser.rb", "~> 0.6.0", :platforms => [:jruby]

# Add webrick for Ruby 3.0+
gem "webrick", "~> 1.7"
@@ -1,60 +0,0 @@
# Home Assistant MCP Documentation

Welcome to the Home Assistant MCP (Master Control Program) documentation. This documentation provides comprehensive information about setting up, configuring, and using the Home Assistant MCP.

## Table of Contents

1. [Getting Started](./getting-started.md)
   - Installation
   - Configuration
   - First Steps

2. [API Reference](./API.md)
   - REST API Endpoints
   - Authentication
   - Error Handling

3. [SSE (Server-Sent Events)](./SSE_API.md)
   - Event Subscriptions
   - Real-time Updates
   - Connection Management

4. [Tools](./tools/README.md)
   - Device Control
   - Automation Management
   - Add-on Management
   - Package Management

5. [Configuration](./configuration/README.md)
   - Environment Variables
   - Security Settings
   - Performance Tuning

6. [Development](./development/README.md)
   - Project Structure
   - Contributing Guidelines
   - Testing

7. [Troubleshooting](./troubleshooting.md)
   - Common Issues
   - Debugging
   - FAQ

## Quick Links

- [GitHub Repository](https://github.com/yourusername/homeassistant-mcp)
- [Issue Tracker](https://github.com/yourusername/homeassistant-mcp/issues)
- [Change Log](./CHANGELOG.md)
- [Security Policy](./SECURITY.md)

## Support

If you need help or have questions:

1. Check the [Troubleshooting Guide](./troubleshooting.md)
2. Search existing [Issues](https://github.com/yourusername/homeassistant-mcp/issues)
3. Create a new issue if your problem isn't already reported

## License

This project is licensed under the MIT License - see the [LICENSE](../LICENSE) file for details.
@@ -1,51 +0,0 @@
# Roadmap for MCP Server

The following roadmap outlines our planned enhancements and future directions for the Home Assistant MCP Server. This document is a living artifact and will be updated regularly as new features are planned and developed.

## Near-Term Goals

- **Advanced Automation Capabilities:**
  - Integrate more sophisticated automation rules with conditional logic and multi-step execution.
  - Introduce a visual automation builder to simplify rule creation.

- **Enhanced Security Features:**
  - Implement multi-factor authentication for critical actions.
  - Strengthen encryption methods and data handling practices.
  - Expand monitoring and alerting for potential security breaches.

- **Performance Optimizations:**
  - Refine the server's resource utilization to reduce latency.
  - Optimize real-time data streaming via SSE for higher throughput.
  - Introduce advanced caching mechanisms for frequently requested data.

## Mid-Term Goals

- **User Interface Improvements:**
  - Develop an intuitive, web-based dashboard for easier device management and monitoring.
  - Provide real-time analytics and performance metrics in the dashboard.

- **Expanded Integrations:**
  - Support a broader range of smart home devices and brands.
  - Integrate with additional home automation platforms and third-party services.

- **Developer Experience Enhancements:**
  - Improve documentation and developer tooling.
  - Streamline contribution guidelines and testing setups.

## Long-Term Vision

- **Ecosystem Expansion:**
  - Build a modular plugin system that allows community-driven extensions and integrations.
  - Enable seamless integration with future technologies in the smart home and AI domains.

- **Scalability and Resilience:**
  - Architect the system to support large-scale deployments across multiple instances.
  - Incorporate advanced load balancing and failover mechanisms.

## How to Follow the Roadmap

- **Community Involvement:** We encourage community feedback and contributions to help refine and prioritize our roadmap.
- **Regular Updates:** This document is regularly updated with new goals, milestones, and completed tasks.
- **Transparency:** Check the project's GitHub repository and issues for ongoing discussions and updates related to roadmap items.

*This roadmap is intended to serve as a guide and may evolve based on community needs, technological advancements, and strategic priorities.*
@@ -2,9 +2,24 @@ title: Model Context Protocol (MCP)
description: A bridge between Home Assistant and Language Learning Models
theme: jekyll-theme-minimal
markdown: kramdown

# Repository settings
repository: jango-blockchained/advanced-homeassistant-mcp
github: [metadata]

# Add base URL and URL settings
baseurl: "/advanced-homeassistant-mcp" # the subpath of your site
url: "https://jango-blockchained.github.io" # the base hostname & protocol

# Theme settings
logo: /assets/img/logo.png # path to logo (create this if you want a logo)
show_downloads: true # show download buttons for your repo

plugins:
  - jekyll-relative-links
  - jekyll-seo-tag
  - jekyll-remote-theme
  - jekyll-github-metadata

# Enable relative links
relative_links:
@@ -16,7 +31,39 @@ header_pages:
  - index.md
  - getting-started.md
  - api.md
  - usage.md
  - tools/tools.md
  - development/development.md
  - troubleshooting.md
  - contributing.md
  - roadmap.md

# Collections
collections:
  tools:
    output: true
    permalink: /:collection/:name
  development:
    output: true
    permalink: /:collection/:name

# Default layouts
defaults:
  - scope:
      path: ""
      type: "pages"
    values:
      layout: "default"
  - scope:
      path: "tools"
      type: "tools"
    values:
      layout: "default"
  - scope:
      path: "development"
      type: "development"
    values:
      layout: "default"

# Exclude files from processing
exclude:
@@ -24,3 +71,8 @@ exclude:
  - Gemfile.lock
  - node_modules
  - vendor

# Sass settings
sass:
  style: compressed
  sass_dir: _sass
401
docs/api.md
@@ -1,4 +1,290 @@
# API Reference
# 🚀 Home Assistant MCP API Documentation

 

## 🌟 Quick Start

```bash
# Get API schema with caching
curl -X GET http://localhost:3000/mcp \
  -H "Cache-Control: max-age=3600" # Cache for 1 hour
```

## 🔌 Core Functions ⚙️

### State Management (`/api/state`)

```http
GET /api/state?cache=true   # Enable client-side caching
POST /api/state
```

**Example Request:**

```json
{
  "context": "living_room",
  "state": {
    "lights": "on",
    "temperature": 22
  },
  "_cache": {                     // Optional caching config
    "ttl": 300,                   // 5 minutes
    "tags": ["lights", "climate"]
  }
}
```
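
A minimal client-side sketch of the call above, assuming the server listens on `localhost:3000` and uses the bearer-token authentication described later in this document; host, port, and token are placeholders:

```typescript
// Post a state update with the optional caching hints shown above.
// Host, port, and token are placeholders — adjust to your deployment.
async function setLivingRoomState(token: string): Promise<void> {
  const response = await fetch("http://localhost:3000/api/state", {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({
      context: "living_room",
      state: { lights: "on", temperature: 22 },
      _cache: { ttl: 300, tags: ["lights", "climate"] },
    }),
  });

  if (!response.ok) {
    throw new Error(`State update failed: ${response.status}`);
  }
}
```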
|
||||||
|
|
||||||
|
## ⚡ Action Endpoints
|
||||||
|
|
||||||
|
### Execute Action with Cache Validation
|
||||||
|
```http
|
||||||
|
POST /api/action
|
||||||
|
If-None-Match: "etag_value" // Prevent duplicate actions
|
||||||
|
```
|
||||||
|
|
||||||
|
**Batch Processing:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"actions": [
|
||||||
|
{ "action": "🌞 Morning Routine", "params": { "brightness": 80 } },
|
||||||
|
{ "action": "❄️ AC Control", "params": { "temp": 21 } }
|
||||||
|
],
|
||||||
|
"_parallel": true // Execute actions concurrently
|
||||||
|
}
|
||||||
|
```
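
A hedged sketch of issuing the batch above from TypeScript, reusing the `If-None-Match` validation described for `/api/action`; the host and token are placeholders, and the batch/ETag semantics follow the documentation above:

```typescript
// Execute two actions in one request; the optional ETag guards against
// replaying a batch the server has already applied.
async function runMorningBatch(token: string, etag?: string) {
  const headers: Record<string, string> = {
    "Content-Type": "application/json",
    Authorization: `Bearer ${token}`,
  };
  if (etag) headers["If-None-Match"] = etag; // server answers 304 if nothing changed

  const response = await fetch("http://localhost:3000/api/action", {
    method: "POST",
    headers,
    body: JSON.stringify({
      actions: [
        { action: "🌞 Morning Routine", params: { brightness: 80 } },
        { action: "❄️ AC Control", params: { temp: 21 } },
      ],
      _parallel: true, // execute actions concurrently
    }),
  });

  return response.status === 304 ? null : response.json();
}
```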
|
||||||
|
|
||||||
|
## 🔍 Query Functions
|
||||||
|
|
||||||
|
### Available Actions with ETag
|
||||||
|
```http
|
||||||
|
GET /api/actions
|
||||||
|
ETag: "a1b2c3d4" // Client-side cache validation
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response Headers:**
|
||||||
|
```
|
||||||
|
Cache-Control: public, max-age=86400 // 24-hour cache
|
||||||
|
ETag: "a1b2c3d4"
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🌐 WebSocket Events
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
const ws = new WebSocket('wss://ha-mcp/ws');
|
||||||
|
ws.onmessage = ({ data }) => {
|
||||||
|
const event = JSON.parse(data);
|
||||||
|
if(event.type === 'STATE_UPDATE') {
|
||||||
|
updateUI(event.payload); // 🎨 Real-time UI sync
|
||||||
|
}
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🗃️ Caching Strategies
|
||||||
|
|
||||||
|
### Client-Side Caching
|
||||||
|
```http
|
||||||
|
GET /api/devices
|
||||||
|
Cache-Control: max-age=300, stale-while-revalidate=60
|
||||||
|
```
|
||||||
|
|
||||||
|
### Server-Side Cache-Control
|
||||||
|
```typescript
|
||||||
|
// Example middleware configuration
|
||||||
|
app.use(
|
||||||
|
cacheMiddleware({
|
||||||
|
ttl: 60 * 5, // 5 minutes
|
||||||
|
paths: ['/api/devices', '/mcp'],
|
||||||
|
vary: ['Authorization'] // User-specific caching
|
||||||
|
})
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
## ❌ Error Handling
|
||||||
|
|
||||||
|
**429 Too Many Requests:**
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"error": {
|
||||||
|
"code": "RATE_LIMITED",
|
||||||
|
"message": "Slow down! 🐢",
|
||||||
|
"retry_after": 30,
|
||||||
|
"docs": "https://ha-mcp/docs/rate-limits"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🚦 Rate Limiting Tiers

| Tier        | Requests/min | Features        |
|-------------|--------------|-----------------|
| Guest       | 10           | Basic read-only |
| User        | 100          | Full access     |
| Power User  | 500          | Priority queue  |
| Integration | 1000         | Bulk operations |

## 🛠️ Example Usage
|
||||||
|
|
||||||
|
### Smart Cache Refresh
|
||||||
|
```javascript
|
||||||
|
async function getDevices() {
|
||||||
|
const response = await fetch('/api/devices', {
|
||||||
|
headers: {
|
||||||
|
'If-None-Match': localStorage.getItem('devicesETag')
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
if(response.status === 304) { // Not Modified
|
||||||
|
return JSON.parse(localStorage.devicesCache);
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
localStorage.setItem('devicesETag', response.headers.get('ETag'));
|
||||||
|
localStorage.setItem('devicesCache', JSON.stringify(data));
|
||||||
|
return data;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## 🔒 Security Middleware (Enhanced)
|
||||||
|
|
||||||
|
### Cache-Aware Rate Limiting
|
||||||
|
```typescript
|
||||||
|
app.use(
|
||||||
|
rateLimit({
|
||||||
|
windowMs: 15 * 60 * 1000, // 15 minutes
|
||||||
|
max: 100, // Limit each IP to 100 requests per window
|
||||||
|
cache: new RedisStore(), // Distributed cache
|
||||||
|
keyGenerator: (req) => {
|
||||||
|
return `${req.ip}-${req.headers.authorization}`;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
### Security Headers
|
||||||
|
```http
|
||||||
|
Content-Security-Policy: default-src 'self';
|
||||||
|
Strict-Transport-Security: max-age=31536000;
|
||||||
|
X-Content-Type-Options: nosniff;
|
||||||
|
Cache-Control: public, max-age=600;
|
||||||
|
ETag: "abc123"
|
||||||
|
```
|
||||||
|
|
||||||
|
## 📘 Best Practices

1. **Cache Wisely:** Use `ETag` and `Cache-Control` headers for state data
2. **Batch Operations:** Combine requests using `/api/actions/batch`
3. **WebSocket First:** Prefer real-time updates over polling
4. **Error Recovery:** Implement exponential backoff with jitter (a sketch follows at the end of this section)
5. **Cache Invalidation:** Use tags for bulk invalidation

```mermaid
graph LR
A[Client] -->|Cached Request| B{CDN}
B -->|Cache Hit| C[Return 304]
B -->|Cache Miss| D[Origin Server]
D -->|Response| B
B -->|Response| A
```

> Pro Tip: Use `curl -I` to inspect cache headers! 🔍

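A minimal sketch of the error-recovery practice above (exponential backoff with jitter), honouring the `retry_after` hint returned on 429 responses; the retry limits and helper shape are illustrative, not part of the server API:

```typescript
// Retry a request with exponential backoff plus random jitter.
// Prefers the server's retry_after hint on 429 responses when present.
async function fetchWithBackoff(url: string, init: RequestInit = {}, maxRetries = 5): Promise<Response> {
  for (let attempt = 0; attempt <= maxRetries; attempt++) {
    const response = await fetch(url, init);
    if (response.status !== 429 && response.status < 500) {
      return response; // success, or a non-retryable client error
    }
    if (attempt === maxRetries) return response;

    const body = response.status === 429 ? await response.clone().json().catch(() => null) : null;
    const hintedMs = body?.error?.retry_after ? body.error.retry_after * 1000 : 0;
    const backoffMs = Math.min(30_000, 500 * 2 ** attempt); // exponential growth, capped
    const jitterMs = Math.random() * 250;                   // jitter avoids thundering herds
    await new Promise((resolve) => setTimeout(resolve, Math.max(hintedMs, backoffMs) + jitterMs));
  }
  throw new Error("unreachable");
}
```
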
|
|
||||||
|
## Device Control
|
||||||
|
|
||||||
|
### Common Entity Controls
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "control",
|
||||||
|
"command": "turn_on", // Options: "turn_on", "turn_off", "toggle"
|
||||||
|
"entity_id": "light.living_room"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Light Control
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "control",
|
||||||
|
"command": "turn_on",
|
||||||
|
"entity_id": "light.living_room",
|
||||||
|
"brightness": 128,
|
||||||
|
"color_temp": 4000,
|
||||||
|
"rgb_color": [255, 0, 0]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Add-on Management
|
||||||
|
|
||||||
|
### List Available Add-ons
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "addon",
|
||||||
|
"action": "list"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install Add-on
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "addon",
|
||||||
|
"action": "install",
|
||||||
|
"slug": "core_configurator",
|
||||||
|
"version": "5.6.0"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Manage Add-on State
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "addon",
|
||||||
|
"action": "start", // Options: "start", "stop", "restart"
|
||||||
|
"slug": "core_configurator"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Package Management
|
||||||
|
|
||||||
|
### List HACS Packages
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "package",
|
||||||
|
"action": "list",
|
||||||
|
"category": "integration" // Options: "integration", "plugin", "theme", "python_script", "appdaemon", "netdaemon"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install Package
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tool": "package",
|
||||||
|
"action": "install",
|
||||||
|
"category": "integration",
|
||||||
|
"repository": "hacs/integration",
|
||||||
|
"version": "1.32.0"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Automation Management
|
||||||
|
|
||||||
|
For automation management details and endpoints, please refer to the [Tools Documentation](tools/tools.md).
|
||||||
|
|
||||||
|
## Security Considerations
|
||||||
|
|
||||||
|
- Validate and sanitize all user inputs (a sketch follows after this list).
|
||||||
|
- Enforce rate limiting to prevent abuse.
|
||||||
|
- Apply proper security headers.
|
||||||
|
- Gracefully handle errors based on the environment.
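
To illustrate the first point, here is a minimal sanitization sketch consistent with the project's middleware tests (which expect `<script>` payloads to be stripped); the helper name and field handling are illustrative rather than the actual middleware:

```typescript
import type { NextFunction, Request, Response } from "express";

// Strip script elements and remaining angle-bracket markup from string fields.
// Illustrative only — the real middleware may use a dedicated sanitizer library.
function sanitizeStrings(value: unknown): unknown {
  if (typeof value === "string") {
    return value
      .replace(/<script[\s\S]*?<\/script>/gi, "") // drop script blocks entirely
      .replace(/<[^>]*>/g, "");                   // drop any remaining tags
  }
  if (Array.isArray(value)) return value.map(sanitizeStrings);
  if (value && typeof value === "object") {
    return Object.fromEntries(
      Object.entries(value as Record<string, unknown>).map(([k, v]) => [k, sanitizeStrings(v)]),
    );
  }
  return value;
}

export function sanitizeInput(req: Request, _res: Response, next: NextFunction): void {
  req.body = sanitizeStrings(req.body);
  next();
}
```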
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
If you experience issues with the API:
|
||||||
|
- Verify the endpoint and request payload.
|
||||||
|
- Check authentication tokens and required headers.
|
||||||
|
- Consult the [Troubleshooting Guide](troubleshooting.md) for further guidance.
|
||||||
|
|
||||||
## MCP Schema Endpoint

@@ -43,119 +329,6 @@ Example response:

Note: The `/mcp` endpoint is publicly accessible and does not require authentication, as it only provides schema information.

## Device Control
|
|
||||||
|
|
||||||
### Common Entity Controls
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "control",
|
|
||||||
"command": "turn_on", // or "turn_off", "toggle"
|
|
||||||
"entity_id": "light.living_room"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Light Control
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "control",
|
|
||||||
"command": "turn_on",
|
|
||||||
"entity_id": "light.living_room",
|
|
||||||
"brightness": 128,
|
|
||||||
"color_temp": 4000,
|
|
||||||
"rgb_color": [255, 0, 0]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Add-on Management
|
|
||||||
|
|
||||||
### List Available Add-ons
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "addon",
|
|
||||||
"action": "list"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Install Add-on
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "addon",
|
|
||||||
"action": "install",
|
|
||||||
"slug": "core_configurator",
|
|
||||||
"version": "5.6.0"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Manage Add-on State
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "addon",
|
|
||||||
"action": "start", // or "stop", "restart"
|
|
||||||
"slug": "core_configurator"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Package Management
|
|
||||||
|
|
||||||
### List HACS Packages
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "package",
|
|
||||||
"action": "list",
|
|
||||||
"category": "integration" // or "plugin", "theme", "python_script", "appdaemon", "netdaemon"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Install Package
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "package",
|
|
||||||
"action": "install",
|
|
||||||
"category": "integration",
|
|
||||||
"repository": "hacs/integration",
|
|
||||||
"version": "1.32.0"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Automation Management
|
|
||||||
|
|
||||||
### Create Automation
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "automation_config",
|
|
||||||
"action": "create",
|
|
||||||
"config": {
|
|
||||||
"alias": "Motion Light",
|
|
||||||
"description": "Turn on light when motion detected",
|
|
||||||
"mode": "single",
|
|
||||||
"trigger": [
|
|
||||||
{
|
|
||||||
"platform": "state",
|
|
||||||
"entity_id": "binary_sensor.motion",
|
|
||||||
"to": "on"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"action": [
|
|
||||||
{
|
|
||||||
"service": "light.turn_on",
|
|
||||||
"target": {
|
|
||||||
"entity_id": "light.living_room"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### Duplicate Automation
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"tool": "automation_config",
|
|
||||||
"action": "duplicate",
|
|
||||||
"automation_id": "automation.motion_light"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Core Functions
|
## Core Functions
|
||||||
|
|
||||||
### State Management
|
### State Management
|
||||||
|
|||||||
@@ -43,3 +43,26 @@ The MCP Server is designed as a high-performance, secure, and scalable bridge be

The architecture of the MCP Server prioritizes performance, scalability, and security. By leveraging Bun's high-performance runtime, employing real-time communication through SSE, and maintaining a modular, secure design, the MCP Server provides a robust platform for integrating Home Assistant with modern LLM functionalities.

*This document is a living document and will be updated as the system evolves.*

## Key Components

- **API Module:** Handles RESTful endpoints, authentication, and error management.
- **SSE Module:** Provides real-time updates through Server-Sent Events.
- **Tools Module:** Offers various utilities for device control, automation, and data processing.
- **Security Module:** Implements token-based authentication and secure communications.
- **Integration Module:** Bridges data between Home Assistant and external systems.

## Data Flow

1. Requests enter via the API endpoints.
2. Security middleware validates and processes requests.
3. Core modules process data and execute the necessary business logic.
4. Real-time notifications are managed by the SSE module.
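
A compact sketch of that request path, assuming an Express-style app; the middleware, route, and SSE wiring are illustrative stand-ins rather than the project's actual exports:

```typescript
import express, { type NextFunction, type Request, type Response } from "express";

// Illustrative stand-ins for the real modules; names are assumptions.
const authenticate = (req: Request, res: Response, next: NextFunction) =>
  req.headers.authorization ? next() : res.status(401).json({ error: "unauthorized" });

const sseClients = new Set<Response>();
const broadcast = (type: string, payload: unknown) => {
  for (const client of sseClients) client.write(`data: ${JSON.stringify({ type, payload })}\n\n`);
};

const app = express();
app.use(express.json()); // 1. requests enter via the API endpoints
app.use(authenticate);   // 2. security middleware validates requests

app.post("/api/state", (req, res) => {
  const result = { ...req.body, updatedAt: Date.now() }; // 3. core logic (placeholder)
  broadcast("STATE_UPDATE", result);                     // 4. SSE module notifies subscribers
  res.json(result);
});

app.get("/api/sse/subscribe", (req, res) => {
  res.setHeader("Content-Type", "text/event-stream");
  sseClients.add(res);
  req.on("close", () => sseClients.delete(res));
});

app.listen(3000);
```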

## Future Enhancements

- Expand modularity with potential microservices.
- Enhance security with multi-factor authentication.
- Improve scalability through distributed architectures.

*Further diagrams and detailed breakdowns will be added in future updates.*
@@ -1,27 +1,27 @@
|
|||||||
# Contributing to Home Assistant MCP
|
# Contributing to Home Assistant MCP
|
||||||
|
|
||||||
We welcome contributions from the community! Your help and feedback are essential to improving the MCP Server. Please review the following guidelines before contributing.
|
We welcome community contributions to improve the MCP Server. Please review the following guidelines before contributing.
|
||||||
|
|
||||||
## How to Contribute
|
## How to Contribute
|
||||||
|
|
||||||
1. **Fork the Repository**: Create your personal fork of the project on GitHub.
|
1. **Fork the Repository:** Create your personal fork on GitHub.
|
||||||
2. **Create a Feature Branch**: Use clear branch names, e.g., `feature/your-feature` or `bugfix/short-description`.
|
2. **Create a Feature Branch:** Use a clear name (e.g., `feature/your-feature` or `bugfix/short-description`).
|
||||||
3. **Make Changes**: Develop your feature or fix bugs. Ensure your code follows the project's coding conventions.
|
3. **Make Changes:** Develop your feature or fix bugs while following our coding standards.
|
||||||
4. **Write Tests**: Include tests for new features or bug fixes. We aim for high code coverage.
|
4. **Write Tests:** Include tests for new features or bug fixes.
|
||||||
5. **Submit a Pull Request (PR)**: Once your changes are complete, submit a PR. Our team will review your request and provide feedback.
|
5. **Submit a Pull Request:** Once your changes are complete, submit a PR for review.
|
||||||
6. **Address Feedback**: Make any necessary revisions based on code review feedback.
|
6. **Address Feedback:** Revise your PR based on maintainers' suggestions.
|
||||||
|
|
||||||
## Code Style Guidelines
|
## Code Style Guidelines
|
||||||
|
|
||||||
- Follow the project's established coding style.
|
- Follow the project's established coding style.
|
||||||
- Use Bun tooling for linting and formatting:
|
- Use Bun tooling for linting and formatting:
|
||||||
- `bun run lint` for linting
|
- `bun run lint`
|
||||||
- `bun run format` for formatting
|
- `bun run format`
|
||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
- Update relevant documentation alongside your code changes.
|
- Update documentation alongside your code changes.
|
||||||
- Ensure that tests pass and coverage remains high.
|
- Ensure tests pass and coverage remains high.
|
||||||
|
|
||||||
## Reporting Issues
|
## Reporting Issues
|
||||||
|
|
||||||
|
|||||||
@@ -1,122 +1,30 @@
|
|||||||
# Getting Started with Home Assistant MCP
|
# Getting Started
|
||||||
|
|
||||||
This guide will help you get started with the Home Assistant MCP (Master Control Program).
|
Begin your journey with the Home Assistant MCP Server by following these steps:
|
||||||
|
|
||||||
## Prerequisites
|
- **API Documentation:** Read the [API Documentation](api.md) for available endpoints.
|
||||||
|
- **Real-Time Updates:** Learn about [Server-Sent Events](sse-api.md) for live communication.
|
||||||
Before you begin, ensure you have:
|
- **Tools:** Explore available [Tools](tools/tools.md) for device control and automation.
|
||||||
|
- **Configuration:** Refer to the [Configuration Guide](configuration.md) for setup and advanced settings.
|
||||||
1. Node.js (v16 or higher)
|
|
||||||
2. A running Home Assistant instance
|
|
||||||
3. A Home Assistant Long-Lived Access Token
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
1. Clone the repository:
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/yourusername/homeassistant-mcp.git
|
|
||||||
cd homeassistant-mcp
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Install dependencies:
|
|
||||||
```bash
|
|
||||||
bun install
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Copy the example environment file:
|
|
||||||
```bash
|
|
||||||
cp .env.example .env
|
|
||||||
```
|
|
||||||
|
|
||||||
4. Edit the `.env` file with your configuration:
|
|
||||||
```env
|
|
||||||
# Server Configuration
|
|
||||||
PORT=3000
|
|
||||||
BUN_ENV=development
|
|
||||||
|
|
||||||
# Home Assistant Configuration
|
|
||||||
HASS_HOST=http://your-hass-instance:8123
|
|
||||||
HASS_TOKEN=your-long-lived-access-token
|
|
||||||
|
|
||||||
# Security Configuration
|
|
||||||
JWT_SECRET=your-secret-key
|
|
||||||
```
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
### Environment Variables
|
|
||||||
|
|
||||||
- `PORT`: The port number for the MCP server (default: 3000)
|
|
||||||
- `BUN_ENV`: The environment mode (development, production, test)
|
|
||||||
- `HASS_HOST`: Your Home Assistant instance URL
|
|
||||||
- `HASS_TOKEN`: Your Home Assistant Long-Lived Access Token
|
|
||||||
- `JWT_SECRET`: Secret key for JWT token generation
|
|
||||||
|
|
||||||
### Development Mode
|
|
||||||
|
|
||||||
For development, you can use:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bun run dev
|
|
||||||
```
|
|
||||||
|
|
||||||
This will start the server in development mode with hot reloading.
|
|
||||||
|
|
||||||
### Production Mode
|
|
||||||
|
|
||||||
For production, build and start the server:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
bun run build
|
|
||||||
bun start
|
|
||||||
```
|
|
||||||
|
|
||||||
## First Steps
|
|
||||||
|
|
||||||
1. Check the server is running:
|
|
||||||
```bash
|
|
||||||
curl http://localhost:3000/api/health
|
|
||||||
```
|
|
||||||
|
|
||||||
2. List available devices:
|
|
||||||
```bash
|
|
||||||
curl -H "Authorization: Bearer your-token" http://localhost:3000/api/tools/devices
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Subscribe to events:
|
|
||||||
```bash
|
|
||||||
curl -H "Authorization: Bearer your-token" http://localhost:3000/api/sse/subscribe?events=state_changed
|
|
||||||
```
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
- Read the [API Documentation](./API.md) for available endpoints
|
|
||||||
- Learn about [Server-Sent Events](./SSE_API.md) for real-time updates
|
|
||||||
- Explore available [Tools](./tools/README.md) for device control
|
|
||||||
- Check the [Configuration Guide](./configuration/README.md) for advanced settings
|
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
If you encounter issues:
|
If you encounter any issues:
|
||||||
|
1. Verify that your Home Assistant instance is accessible.
|
||||||
1. Verify your Home Assistant instance is accessible
|
2. Ensure that all required environment variables are properly set.
|
||||||
2. Check your environment variables are correctly set
|
3. Consult the [Troubleshooting Guide](troubleshooting.md) for additional solutions.
|
||||||
3. Look for errors in the server logs
|
|
||||||
4. Consult the [Troubleshooting Guide](./troubleshooting.md)
|
|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
For development and contributing:
|
For contributors:
|
||||||
|
1. Fork the repository.
|
||||||
1. Fork the repository
|
2. Create a feature branch.
|
||||||
2. Create a feature branch
|
3. Follow the [Development Guide](development/development.md) for contribution guidelines.
|
||||||
3. Follow the [Development Guide](./development/README.md)
|
4. Submit a pull request with your enhancements.
|
||||||
4. Submit a pull request
|
|
||||||
|
|
||||||
## Support
|
## Support
|
||||||
|
|
||||||
Need help? Check out:
|
Need help?
|
||||||
|
- Visit our [GitHub Issues](https://github.com/jango-blockchained/homeassistant-mcp/issues).
|
||||||
- [GitHub Issues](https://github.com/yourusername/homeassistant-mcp/issues)
|
- Review the [Troubleshooting Guide](troubleshooting.md).
|
||||||
- [Troubleshooting Guide](./troubleshooting.md)
|
- Check the [FAQ](troubleshooting.md#faq) for common questions.
|
||||||
- [FAQ](./troubleshooting.md#faq)
|
|
||||||
@@ -4,11 +4,34 @@ title: Home
|
|||||||
nav_order: 1
|
nav_order: 1
|
||||||
---
|
---
|
||||||
|
|
||||||
|
# 📚 Home Assistant MCP Documentation
|
||||||
|
|
||||||
|
Welcome to the documentation for the Home Assistant MCP (Model Context Protocol) Server.
|
||||||
|
|
||||||
|
## 📑 Documentation Index
|
||||||
|
|
||||||
|
- [Getting Started Guide](getting-started.md)
|
||||||
|
- [API Documentation](api.md)
|
||||||
|
- [Troubleshooting](troubleshooting.md)
|
||||||
|
- [Contributing Guide](contributing.md)
|
||||||
|
|
||||||
|
For project overview, installation, and general information, please see our [main README](../README.md).
|
||||||
|
|
||||||
|
## 🔗 Quick Links
|
||||||
|
|
||||||
|
- [GitHub Repository](https://github.com/jango-blockchained/homeassistant-mcp)
|
||||||
|
- [Issue Tracker](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||||
|
- [GitHub Discussions](https://github.com/jango-blockchained/homeassistant-mcp/discussions)
|
||||||
|
|
||||||
|
## 📝 License
|
||||||
|
|
||||||
|
This project is licensed under the MIT License. See [LICENSE](../LICENSE) for details.
|
||||||
|
|
||||||
# Model Context Protocol (MCP) Server
|
# Model Context Protocol (MCP) Server
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
The Model Context Protocol (MCP) Server is a cutting-edge bridge between Home Assistant and Language Learning Models (LLMs), designed to revolutionize smart home automation and control.
|
The Model Context Protocol (MCP) Server is a cutting-edge bridge between Home Assistant and Language Learning Models (LLMs), designed to revolutionize smart home automation and control. This documentation provides comprehensive information about setting up, configuring, and using the Home Assistant MCP.
|
||||||
|
|
||||||
## Key Features
|
## Key Features
|
||||||
|
|
||||||
@@ -29,18 +52,59 @@ The Model Context Protocol (MCP) Server is a cutting-edge bridge between Home As
|
|||||||
|
|
||||||
## Documentation
|
## Documentation
|
||||||
|
|
||||||
- [Getting Started](getting-started.md) - Installation and basic setup
|
### Core Documentation
|
||||||
- [API Reference](api.md) - Complete API documentation
|
1. [Getting Started](getting-started.md)
|
||||||
- [SSE API](sse-api.md) - Server-Sent Events API documentation
|
- Installation and basic setup
|
||||||
- [Architecture](architecture.md) - System architecture and design
|
- Configuration
|
||||||
- [Contributing](contributing.md) - How to contribute to the project
|
- First Steps
|
||||||
- [Troubleshooting](troubleshooting.md) - Common issues and solutions
|
|
||||||
|
2. [API Reference](api.md)
|
||||||
|
- REST API Endpoints
|
||||||
|
- Authentication
|
||||||
|
- Error Handling
|
||||||
|
|
||||||
|
3. [SSE API](sse-api.md)
|
||||||
|
- Event Subscriptions
|
||||||
|
- Real-time Updates
|
||||||
|
- Connection Management
|
||||||
|
|
||||||
|
### Advanced Topics
|
||||||
|
4. [Architecture](architecture.md)
|
||||||
|
- System Design
|
||||||
|
- Components
|
||||||
|
- Data Flow
|
||||||
|
|
||||||
|
5. [Configuration](getting-started.md#configuration)
|
||||||
|
- Environment Variables
|
||||||
|
- Security Settings
|
||||||
|
- Performance Tuning
|
||||||
|
|
||||||
|
6. [Development Guide](development/development.md)
|
||||||
|
- Project Structure
|
||||||
|
- Contributing Guidelines
|
||||||
|
- Testing
|
||||||
|
|
||||||
|
7. [Troubleshooting](troubleshooting.md)
|
||||||
|
- Common Issues
|
||||||
|
- Debugging
|
||||||
|
- FAQ
|
||||||
|
|
||||||
|
## Quick Links
|
||||||
|
|
||||||
|
- [GitHub Repository](https://github.com/jango-blockchained/homeassistant-mcp)
|
||||||
|
- [Issue Tracker](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||||
|
- [Contributing Guide](contributing.md)
|
||||||
|
- [Roadmap](roadmap.md)
|
||||||
|
|
||||||
## Community and Support
|
## Community and Support
|
||||||
|
|
||||||
- [GitHub Issues](https://github.com/jango-blockchained/homeassistant-mcp/issues) - Report bugs and request features
|
If you need help or have questions:
|
||||||
- [GitHub Discussions](https://github.com/jango-blockchained/homeassistant-mcp/discussions) - Ask questions and share ideas
|
|
||||||
|
1. Check the [Troubleshooting Guide](troubleshooting.md)
|
||||||
|
2. Search existing [Issues](https://github.com/jango-blockchained/homeassistant-mcp/issues)
|
||||||
|
3. Join our [GitHub Discussions](https://github.com/jango-blockchained/homeassistant-mcp/discussions)
|
||||||
|
4. Create a new issue if your problem isn't already reported
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Licensed under the MIT License. See [LICENSE](https://github.com/jango-blockchained/homeassistant-mcp/blob/main/LICENSE) for details.
|
This project is licensed under the MIT License. See [LICENSE](https://github.com/jango-blockchained/homeassistant-mcp/blob/main/LICENSE) for details.
|
||||||
51
docs/roadmap.md
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
# Roadmap for MCP Server
|
||||||
|
|
||||||
|
The following roadmap outlines our planned enhancements and future directions for the Home Assistant MCP Server. This document is a living guide that will be updated as new features are planned and developed.
|
||||||
|
|
||||||
|
## Near-Term Goals
|
||||||
|
|
||||||
|
- **Advanced Automation Capabilities:**
|
||||||
|
- Integrate sophisticated automation rules with conditional logic and multi-step execution.
|
||||||
|
- Introduce a visual automation builder for simplified rule creation.
|
||||||
|
|
||||||
|
- **Enhanced Security Features:**
|
||||||
|
- Implement multi-factor authentication for critical actions.
|
||||||
|
- Strengthen encryption methods and data handling practices.
|
||||||
|
- Expand monitoring and alerting for potential security breaches.
|
||||||
|
|
||||||
|
- **Performance Optimizations:**
|
||||||
|
- Refine resource utilization to reduce latency.
|
||||||
|
- Optimize real-time data streaming via SSE.
|
||||||
|
- Introduce advanced caching mechanisms for frequently requested data.
|
||||||
|
|
||||||
|
## Mid-Term Goals
|
||||||
|
|
||||||
|
- **User Interface Improvements:**
|
||||||
|
- Develop an intuitive web-based dashboard for device management and monitoring.
|
||||||
|
- Provide real-time analytics and performance metrics.
|
||||||
|
|
||||||
|
- **Expanded Integrations:**
|
||||||
|
- Support a broader range of smart home devices and brands.
|
||||||
|
- Integrate with additional home automation platforms and third-party services.
|
||||||
|
|
||||||
|
- **Developer Experience Enhancements:**
|
||||||
|
- Improve documentation and developer tooling.
|
||||||
|
- Streamline contribution guidelines and testing setups.
|
||||||
|
|
||||||
|
## Long-Term Vision
|
||||||
|
|
||||||
|
- **Ecosystem Expansion:**
|
||||||
|
- Build a modular plugin system for community-driven extensions and integrations.
|
||||||
|
- Enable seamless integration with future technologies in smart home and AI domains.
|
||||||
|
|
||||||
|
- **Scalability and Resilience:**
|
||||||
|
- Architect the system to support large-scale deployments.
|
||||||
|
- Incorporate advanced load balancing and failover mechanisms.
|
||||||
|
|
||||||
|
## How to Follow the Roadmap
|
||||||
|
|
||||||
|
- **Community Involvement:** We welcome and encourage feedback.
|
||||||
|
- **Regular Updates:** This document is updated regularly with new goals and milestones.
|
||||||
|
- **Transparency:** Check our GitHub repository and issue tracker for ongoing discussions.
|
||||||
|
|
||||||
|
*This roadmap is intended as a guide and may evolve based on community needs, technological advancements, and strategic priorities.*
|
||||||
@@ -7,23 +7,24 @@
|
|||||||
bun test # Run all tests
|
bun test # Run all tests
|
||||||
bun test --watch # Run tests in watch mode
|
bun test --watch # Run tests in watch mode
|
||||||
bun test --coverage # Run tests with coverage
|
bun test --coverage # Run tests with coverage
|
||||||
bun test path/to/test.ts # Run specific test file
|
bun test path/to/test.ts # Run a specific test file
|
||||||
|
|
||||||
# Additional Options
|
# Additional Options
|
||||||
DEBUG=true bun test # Run with debug output
|
DEBUG=true bun test # Run with debug output
|
||||||
bun test --pattern "auth" # Run tests matching pattern
|
bun test --pattern "auth" # Run tests matching a pattern
|
||||||
bun test --timeout 60000 # Run with custom timeout
|
bun test --timeout 60000 # Run with a custom timeout
|
||||||
```
|
```
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
|
|
||||||
This document describes the testing setup and practices used in the Home Assistant MCP project. The project uses Bun's test runner for unit and integration testing, with a comprehensive test suite covering security, SSE (Server-Sent Events), middleware, and other core functionalities.
|
This document describes the testing setup and practices used in the Home Assistant MCP project. We use Bun's test runner for both unit and integration testing, ensuring comprehensive coverage across modules.
|
||||||
|
|
||||||
## Test Structure
|
## Test Structure
|
||||||
|
|
||||||
Tests are organized in two main locations:
|
Tests are organized in two main locations:
|
||||||
|
|
||||||
1. **Root Level Integration Tests** (`/__tests__/`):
|
1. **Root Level Integration Tests** (`/__tests__/`):
|
||||||
|
|
||||||
```
|
```
|
||||||
__tests__/
|
__tests__/
|
||||||
├── ai/ # AI/ML component tests
|
├── ai/ # AI/ML component tests
|
||||||
@@ -40,6 +41,7 @@ Tests are organized in two main locations:
|
|||||||
```
|
```
|
||||||
|
|
||||||
2. **Component Level Unit Tests** (`src/**/`):
|
2. **Component Level Unit Tests** (`src/**/`):
|
||||||
|
|
||||||
```
|
```
|
||||||
src/
|
src/
|
||||||
├── __tests__/ # Global test setup and utilities
|
├── __tests__/ # Global test setup and utilities
|
||||||
@@ -49,8 +51,6 @@ Tests are organized in two main locations:
|
|||||||
│ └── component.ts
|
│ └── component.ts
|
||||||
```
|
```
|
||||||
|
|
||||||
The root level `__tests__` directory contains integration and end-to-end tests that verify the interaction between different components of the system, while the component-level tests focus on unit testing individual modules.
|
|
||||||
|
|
||||||
## Test Configuration
|
## Test Configuration
|
||||||
|
|
||||||
### Bun Test Configuration (`bunfig.toml`)
|
### Bun Test Configuration (`bunfig.toml`)
|
||||||
@@ -88,26 +88,21 @@ bun run format
|
|||||||
|
|
||||||
### Global Configuration
|
### Global Configuration
|
||||||
|
|
||||||
The project uses a global test setup file (`src/__tests__/setup.ts`) that provides:
|
A global test setup file (`src/__tests__/setup.ts`) provides:
|
||||||
|
|
||||||
- Environment configuration
|
- Environment configuration
|
||||||
- Mock utilities
|
- Mock utilities
|
||||||
- Test helper functions
|
- Test helper functions
|
||||||
- Global test lifecycle hooks
|
- Global lifecycle hooks
|
||||||
|
|
||||||
### Test Environment
|
### Test Environment
|
||||||
|
|
||||||
Tests run with the following configuration:
|
- Environment variables are loaded from `.env.test`.
|
||||||
|
- Console output is minimized unless `DEBUG=true`.
|
||||||
- Environment variables are loaded from `.env.test`
|
- JWT secrets and tokens are preconfigured for testing.
|
||||||
- Console output is suppressed during tests (unless DEBUG=true)
|
- Rate limiting and security features are initialized appropriately.
|
||||||
- JWT secrets and tokens are automatically configured for testing
|
|
||||||
- Rate limiting and other security features are properly initialized
|
|
||||||
|
|
||||||
## Running Tests
|
## Running Tests
|
||||||
|
|
||||||
To run the test suite:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Basic test run
|
# Basic test run
|
||||||
bun test
|
bun test
|
||||||
@@ -115,7 +110,7 @@ bun test
|
|||||||
# Run tests with coverage
|
# Run tests with coverage
|
||||||
bun test --coverage
|
bun test --coverage
|
||||||
|
|
||||||
# Run specific test file
|
# Run a specific test file
|
||||||
bun test path/to/test.test.ts
|
bun test path/to/test.test.ts
|
||||||
|
|
||||||
# Run tests in watch mode
|
# Run tests in watch mode
|
||||||
@@ -131,82 +126,61 @@ bun test --timeout 60000
|
|||||||
bun test --pattern "auth"
|
bun test --pattern "auth"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Test Environment Setup
|
## Advanced Debugging
|
||||||
|
|
||||||
1. **Prerequisites**:
|
### Using Node Inspector
|
||||||
- Bun >= 1.0.0
|
|
||||||
- Node.js dependencies (see package.json)
|
|
||||||
|
|
||||||
2. **Environment Files**:
|
```bash
|
||||||
- `.env.test` - Test environment variables
|
# Start tests with inspector
|
||||||
- `.env.development` - Development environment variables
|
bun test --inspect
|
||||||
|
|
||||||
3. **Test Data**:
|
# Start tests with inspector and break on first line
|
||||||
- Mock responses in `__tests__/mock-responses/`
|
bun test --inspect-brk
|
||||||
- Test fixtures in `__tests__/fixtures/`
|
```
|
||||||
|
|
||||||
### Continuous Integration
|
### Using VS Code
|
||||||
|
|
||||||
The project uses GitHub Actions for CI/CD. Tests are automatically run on:
|
Create a launch configuration in `.vscode/launch.json`:
|
||||||
- Pull requests
|
|
||||||
- Pushes to main branch
|
```json
|
||||||
- Release tags
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"type": "bun",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Debug Tests",
|
||||||
|
"program": "${workspaceFolder}/node_modules/bun/bin/bun",
|
||||||
|
"args": ["test", "${file}"],
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"env": { "DEBUG": "true" }
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Test Isolation
|
||||||
|
|
||||||
|
To run a single test in isolation:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
describe.only("specific test suite", () => {
|
||||||
|
it.only("specific test case", () => {
|
||||||
|
// Only this test will run
|
||||||
|
});
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
## Writing Tests
|
## Writing Tests
|
||||||
|
|
||||||
### Test File Naming
|
### Test File Naming
|
||||||
|
|
||||||
- Test files should be placed in a `__tests__` directory adjacent to the code being tested
|
- Place test files in a `__tests__` directory adjacent to the code being tested.
|
||||||
- Test files should be named `*.test.ts`
|
- Name files with the pattern `*.test.ts`.
|
||||||
- Test files should mirror the structure of the source code
|
- Mirror the structure of the source code in your test organization.
|
||||||
|
|
||||||
### Test Structure
|
### Example Test Structure
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { describe, expect, it, beforeEach } from "bun:test";
|
|
||||||
|
|
||||||
describe("Module Name", () => {
|
|
||||||
beforeEach(() => {
|
|
||||||
// Setup for each test
|
|
||||||
});
|
|
||||||
|
|
||||||
describe("Feature/Function Name", () => {
|
|
||||||
it("should do something specific", () => {
|
|
||||||
// Test implementation
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### Test Utilities
|
|
||||||
|
|
||||||
The project provides several test utilities:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { testUtils } from "../__tests__/setup";
|
|
||||||
|
|
||||||
// Available utilities:
|
|
||||||
- mockWebSocket() // Mock WebSocket for SSE tests
|
|
||||||
- mockResponse() // Mock HTTP response for API tests
|
|
||||||
- mockRequest() // Mock HTTP request for API tests
|
|
||||||
- createTestClient() // Create test SSE client
|
|
||||||
- createTestEvent() // Create test event
|
|
||||||
- createTestEntity() // Create test Home Assistant entity
|
|
||||||
- wait() // Helper to wait for async operations
|
|
||||||
```
|
|
||||||
|
|
||||||
## Testing Patterns
|
|
||||||
|
|
||||||
### Security Testing
|
|
||||||
|
|
||||||
Security tests cover:
|
|
||||||
- Token validation and encryption
|
|
||||||
- Rate limiting
|
|
||||||
- Request validation
|
|
||||||
- Input sanitization
|
|
||||||
- Error handling
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```typescript
|
```typescript
|
||||||
describe("Security Features", () => {
|
describe("Security Features", () => {
|
||||||
it("should validate tokens correctly", () => {
|
it("should validate tokens correctly", () => {
|
||||||
@@ -218,83 +192,17 @@ describe("Security Features", () => {
|
|||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
### SSE Testing
|
## Coverage
|
||||||
|
|
||||||
SSE tests cover:
|
The project maintains strict coverage:
|
||||||
- Client authentication
|
- Overall coverage: at least 80%
|
||||||
- Message broadcasting
|
- Critical paths: 90%+
|
||||||
- Rate limiting
|
- New features: ≥85% coverage
|
||||||
- Subscription management
|
|
||||||
- Client cleanup
|
|
||||||
|
|
||||||
Example:
|
Generate a coverage report with:
|
||||||
```typescript
|
|
||||||
describe("SSE Features", () => {
|
|
||||||
it("should authenticate valid clients", () => {
|
|
||||||
const client = createTestClient("test-client");
|
|
||||||
const result = sseManager.addClient(client, validToken);
|
|
||||||
expect(result?.authenticated).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### Middleware Testing
|
```bash
|
||||||
|
bun test --coverage
|
||||||
Middleware tests cover:
|
|
||||||
- Request validation
|
|
||||||
- Input sanitization
|
|
||||||
- Error handling
|
|
||||||
- Response formatting
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```typescript
|
|
||||||
describe("Middleware", () => {
|
|
||||||
it("should sanitize HTML in request body", () => {
|
|
||||||
const req = mockRequest({
|
|
||||||
body: { text: '<script>alert("xss")</script>' }
|
|
||||||
});
|
|
||||||
sanitizeInput(req, res, next);
|
|
||||||
expect(req.body.text).toBe("");
|
|
||||||
});
|
|
||||||
});
|
|
||||||
```
|
|
||||||
|
|
||||||
### Integration Testing
|
|
||||||
|
|
||||||
Integration tests in the root `__tests__` directory cover:
|
|
||||||
|
|
||||||
- **AI/ML Components**: Testing machine learning model integrations and predictions
|
|
||||||
- **API Integration**: End-to-end API route testing
|
|
||||||
- **Context Management**: Testing context persistence and state management
|
|
||||||
- **Home Assistant Integration**: Testing communication with Home Assistant
|
|
||||||
- **Schema Validation**: Testing data validation across the application
|
|
||||||
- **Security Integration**: Testing security features in a full system context
|
|
||||||
- **WebSocket Communication**: Testing real-time communication
|
|
||||||
- **Server Integration**: Testing the complete server setup and configuration
|
|
||||||
|
|
||||||
Example integration test:
|
|
||||||
```typescript
|
|
||||||
describe("API Integration", () => {
|
|
||||||
it("should handle a complete authentication flow", async () => {
|
|
||||||
// Setup test client
|
|
||||||
const client = await createTestClient();
|
|
||||||
|
|
||||||
// Test registration
|
|
||||||
const regResponse = await client.register(testUser);
|
|
||||||
expect(regResponse.status).toBe(201);
|
|
||||||
|
|
||||||
// Test authentication
|
|
||||||
const authResponse = await client.authenticate(testCredentials);
|
|
||||||
expect(authResponse.status).toBe(200);
|
|
||||||
expect(authResponse.body.token).toBeDefined();
|
|
||||||
|
|
||||||
// Test protected endpoint access
|
|
||||||
const protectedResponse = await client.get("/api/protected", {
|
|
||||||
headers: { Authorization: `Bearer ${authResponse.body.token}` }
|
|
||||||
});
|
|
||||||
expect(protectedResponse.status).toBe(200);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Security Middleware Testing
|
## Security Middleware Testing
|
||||||
@@ -1,144 +1,135 @@
|
|||||||
# Troubleshooting Guide
|
# Troubleshooting Guide
|
||||||
|
|
||||||
This guide helps you diagnose and fix common issues with the Home Assistant MCP.
|
This guide provides solutions to common issues encountered with the Home Assistant MCP Server.
|
||||||
|
|
||||||
## Common Issues
|
## Common Issues
|
||||||
|
|
||||||
### Connection Issues
|
- **Server Not Starting:**
|
||||||
|
- Verify that all required environment variables are correctly set.
|
||||||
|
- Check for port conflicts or missing dependencies.
|
||||||
|
- Review the server logs for error details.
|
||||||
|
|
||||||
#### Cannot Connect to Home Assistant
|
- **Connection Problems:**
|
||||||
|
- Ensure your Home Assistant instance is reachable.
|
||||||
|
- Confirm that the authentication token is valid.
|
||||||
|
- Check network configurations and firewalls.
|
||||||
|
|
||||||
|
## Tool Issues
|
||||||
|
|
||||||
|
### Tool Not Found
|
||||||
|
|
||||||
**Symptoms:**
|
**Symptoms:**
|
||||||
- Connection timeout errors
|
- "Tool not found" errors or 404 responses.
|
||||||
- "Failed to connect to Home Assistant" messages
|
|
||||||
- 401 Unauthorized errors
|
|
||||||
|
|
||||||
**Solutions:**
|
**Solutions:**
|
||||||
1. Verify Home Assistant is running
|
- Double-check the tool name spelling.
|
||||||
2. Check HASS_HOST environment variable
|
- Verify that the tool is correctly registered.
|
||||||
3. Validate HASS_TOKEN is correct
|
- Review tool imports and documentation.
|
||||||
4. Ensure network connectivity
|
|
||||||
5. Check firewall settings
|
|
||||||
|
|
||||||
#### SSE Connection Drops
|
### Tool Execution Failures
|
||||||
|
|
||||||
**Symptoms:**
|
**Symptoms:**
|
||||||
- Frequent disconnections
|
- Execution errors or timeouts.
|
||||||
- Missing events
|
|
||||||
- Connection reset errors
|
|
||||||
|
|
||||||
**Solutions:**
|
**Solutions:**
|
||||||
1. Check network stability
|
- Validate input parameters.
|
||||||
2. Increase connection timeout
|
- Check and review error logs.
|
||||||
3. Implement reconnection logic
|
- Debug the tool implementation.
|
||||||
4. Monitor server resources
|
- Ensure proper permissions in Home Assistant.
|
||||||
|
|
||||||
### Authentication Issues
|
## Debugging Steps
|
||||||
|
|
||||||
#### Invalid Token
|
|
||||||
|
|
||||||
**Symptoms:**
|
|
||||||
- 401 Unauthorized responses
|
|
||||||
- "Invalid token" messages
|
|
||||||
- Authentication failures
|
|
||||||
|
|
||||||
**Solutions:**
|
|
||||||
1. Generate new Long-Lived Access Token
|
|
||||||
2. Check token expiration
|
|
||||||
3. Verify token format
|
|
||||||
4. Update environment variables
|
|
||||||
|
|
||||||
#### Rate Limiting
|
|
||||||
|
|
||||||
**Symptoms:**
|
|
||||||
- 429 Too Many Requests
|
|
||||||
- "Rate limit exceeded" messages
|
|
||||||
|
|
||||||
**Solutions:**
|
|
||||||
1. Implement request throttling
|
|
||||||
2. Adjust rate limit settings
|
|
||||||
3. Cache responses
|
|
||||||
4. Optimize request patterns
|
|
||||||
|
|
||||||
### Tool Issues
|
|
||||||
|
|
||||||
#### Tool Not Found
|
|
||||||
|
|
||||||
**Symptoms:**
|
|
||||||
- "Tool not found" errors
|
|
||||||
- 404 Not Found responses
|
|
||||||
|
|
||||||
**Solutions:**
|
|
||||||
1. Check tool name spelling
|
|
||||||
2. Verify tool registration
|
|
||||||
3. Update tool imports
|
|
||||||
4. Check tool availability
|
|
||||||
|
|
||||||
#### Tool Execution Fails
|
|
||||||
|
|
||||||
**Symptoms:**
|
|
||||||
- Tool execution errors
|
|
||||||
- Unexpected responses
|
|
||||||
- Timeout issues
|
|
||||||
|
|
||||||
**Solutions:**
|
|
||||||
1. Validate input parameters
|
|
||||||
2. Check error logs
|
|
||||||
3. Debug tool implementation
|
|
||||||
4. Verify Home Assistant permissions
|
|
||||||
|
|
||||||
## Debugging
|
|
||||||
|
|
||||||
### Server Logs
|
### Server Logs
|
||||||
|
|
||||||
1. Enable debug logging:
|
1. Enable debug logging by setting:
|
||||||
```env
|
```env
|
||||||
LOG_LEVEL=debug
|
LOG_LEVEL=debug
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Check logs:
|
2. Check logs:
|
||||||
```bash
|
```bash
|
||||||
npm run logs
|
npm run logs
|
||||||
```
|
```
|
||||||
|
3. Filter errors:
|
||||||
3. Filter logs:
|
|
||||||
```bash
|
```bash
|
||||||
npm run logs | grep "error"
|
npm run logs | grep "error"
|
||||||
```
|
```
|
||||||
|
|
||||||
### Network Debugging
|
### Network Debugging
|
||||||
|
|
||||||
1. Check API endpoints:
|
1. Test API endpoints:
|
||||||
```bash
|
```bash
|
||||||
curl -v http://localhost:3000/api/health
|
curl -v http://localhost:3000/api/health
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Monitor SSE connections:
|
2. Monitor SSE connections:
|
||||||
```bash
|
```bash
|
||||||
curl -N http://localhost:3000/api/sse/stats
|
curl -N http://localhost:3000/api/sse/stats
|
||||||
```
|
```
|
||||||
|
3. Test WebSocket connectivity:
|
||||||
3. Test WebSocket:
|
|
||||||
```bash
|
```bash
|
||||||
wscat -c ws://localhost:3000
|
wscat -c ws://localhost:3000
|
||||||
```
|
```
|
||||||
|
|
||||||
### Performance Issues
|
### Performance Issues
|
||||||
|
|
||||||
1. Monitor memory usage:
|
- Monitor memory usage with:
|
||||||
```bash
|
```bash
|
||||||
npm run stats
|
npm run stats
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Check response times:
|
## Security Middleware Troubleshooting
|
||||||
```bash
|
|
||||||
curl -w "%{time_total}\n" -o /dev/null -s http://localhost:3000/api/health
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Profile code:
|
### Rate Limiting Problems
|
||||||
```bash
|
|
||||||
npm run profile
|
**Symptoms:** Receiving 429 (Too Many Requests) errors.
|
||||||
```
|
|
||||||
|
**Solutions:**
|
||||||
|
- Adjust and fine-tune rate limit settings.
|
||||||
|
- Consider different limits for critical versus non-critical endpoints.
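
A hedged sketch of that second suggestion, assuming the `express-rate-limit` style middleware used elsewhere in these docs; the paths and limits are illustrative:

```typescript
import express from "express";
import rateLimit from "express-rate-limit";

const app = express();

// Looser limit for read-only schema/state queries.
app.use(["/mcp", "/api/state"], rateLimit({ windowMs: 60_000, max: 100 }));

// Tighter limit for endpoints that change device state.
app.use("/api/action", rateLimit({ windowMs: 60_000, max: 20 }));
```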
|
||||||
|
|
||||||
|
### Request Validation Failures
|
||||||
|
|
||||||
|
**Symptoms:** 400 or 415 errors on valid requests.
|
||||||
|
|
||||||
|
**Solutions:**
|
||||||
|
- Verify that the `Content-Type` header is set correctly.
|
||||||
|
- Inspect request payload size and format.
|
||||||
|
|
||||||
|
### Input Sanitization Issues
|
||||||
|
|
||||||
|
**Symptoms:** Unexpected data transformation or loss.
|
||||||
|
|
||||||
|
**Solutions:**
|
||||||
|
- Test sanitization with various input types.
|
||||||
|
- Implement custom sanitization for complex data if needed.
|
||||||
|
|
||||||
|
### Security Header Configuration
|
||||||
|
|
||||||
|
**Symptoms:** Missing or improper security headers.
|
||||||
|
|
||||||
|
**Solutions:**
|
||||||
|
- Review and update security header configurations (e.g., Helmet settings).
|
||||||
|
- Ensure environment-specific header settings are in place.
|
||||||
|
|
||||||
|
### Error Handling and Logging
|
||||||
|
|
||||||
|
**Symptoms:** Inconsistent error responses.
|
||||||
|
|
||||||
|
**Solutions:**
|
||||||
|
- Enhance logging for detailed error tracking.
|
||||||
|
- Adjust error handlers for production and development differences.
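
A minimal sketch of an environment-aware error handler, assuming Express-style middleware; the exact shape of the project's error responses may differ:

```typescript
import type { NextFunction, Request, Response } from "express";

// Central error handler: verbose in development, terse in production.
export function errorHandler(err: Error, _req: Request, res: Response, _next: NextFunction): void {
  const isProduction = process.env.NODE_ENV === "production";

  console.error(err); // always log server-side for error tracking

  res.status(500).json({
    error: {
      code: "INTERNAL_ERROR",
      message: isProduction ? "Something went wrong" : err.message,
      ...(isProduction ? {} : { stack: err.stack }), // stack traces only in development
    },
  });
}
```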
|
||||||
|
|
||||||
|
## Additional Resources
|
||||||
|
|
||||||
|
- [OWASP Security Guidelines](https://owasp.org/www-project-top-ten/)
|
||||||
|
- [Helmet.js Documentation](https://helmetjs.github.io/)
|
||||||
|
- [JWT Security Best Practices](https://jwt.io/introduction)
|
||||||
|
|
||||||
|
## Getting Help
|
||||||
|
|
||||||
|
If issues persist:
|
||||||
|
1. Review detailed logs.
|
||||||
|
2. Verify your configuration and environment.
|
||||||
|
3. Consult the GitHub issue tracker or community forums.
|
||||||
|
|
||||||
## FAQ
|
## FAQ
|
||||||
|
|
||||||
@@ -167,7 +158,7 @@ A: Adjust SSE_MAX_CLIENTS in configuration or clean up stale connections.
|
|||||||
1. Documentation
|
1. Documentation
|
||||||
- [API Reference](./API.md)
|
- [API Reference](./API.md)
|
||||||
- [Configuration Guide](./configuration/README.md)
|
- [Configuration Guide](./configuration/README.md)
|
||||||
- [Development Guide](./development/README.md)
|
- [Development Guide](./development/development.md)
|
||||||
|
|
||||||
2. Community
|
2. Community
|
||||||
- GitHub Issues
|
- GitHub Issues
|
||||||
|
|||||||
@@ -1,5 +1,34 @@
|
|||||||
# Usage Guide

This guide explains how to use the Home Assistant MCP Server for smart home device management and integration with language learning systems.

## Basic Usage

1. **Starting the Server:**
   - For development: run `npm run dev`.
   - For production: run `npm run build` followed by `npm start`.

2. **Accessing the Web Interface:**
   - Open [http://localhost:3000](http://localhost:3000) in your browser.

3. **Real-Time Updates:**
   - Connect to the SSE endpoint at `/subscribe_events?token=YOUR_TOKEN&domain=light` to receive live updates.

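A browser-side sketch of subscribing to that endpoint with `EventSource`; the token is a placeholder and the payload shape is an assumption:

```typescript
// Subscribe to light-domain events over Server-Sent Events.
const token = "YOUR_TOKEN"; // placeholder — use your real access token
const source = new EventSource(
  `http://localhost:3000/subscribe_events?token=${token}&domain=light`,
);

source.onmessage = (event) => {
  const update = JSON.parse(event.data); // payload shape depends on the server
  console.log("Light update:", update);
};

source.onerror = () => {
  console.warn("SSE connection lost; the browser will retry automatically.");
};
```
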
## Advanced Features

1. **API Interactions:**
   - Use the REST API for operations such as device control, automation, and add-on management (a sketch follows after this list).
   - See [API Documentation](api.md) for details.

2. **Tool Integrations:**
   - Multiple tools are available (see [Tools Documentation](tools/tools.md)) for tasks like automation management and notifications.

3. **Security Settings:**
   - Configure token-based authentication and environment variables as per the [Configuration Guide](getting-started/configuration.md).

4. **Customization and Extensions:**
   - Extend server functionality by developing new tools as outlined in the [Development Guide](development/development.md).

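As referenced under API Interactions, a hedged sketch of invoking the documented `control` tool from TypeScript; the endpoint path is an assumption (adjust to your deployment) while the JSON payload matches the API documentation:

```typescript
// Turn on a light via the documented "control" tool payload.
async function turnOnLivingRoomLight(token: string) {
  const response = await fetch("http://localhost:3000/api/tools/control", { // endpoint path assumed
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      Authorization: `Bearer ${token}`,
    },
    body: JSON.stringify({
      tool: "control",
      command: "turn_on",
      entity_id: "light.living_room",
      brightness: 128,
    }),
  });

  if (!response.ok) {
    throw new Error(`Control call failed: ${response.status}`);
  }
  return response.json();
}
```
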
## Troubleshooting

If you experience issues, review the [Troubleshooting Guide](troubleshooting.md).
91
examples/README.md
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
# Speech-to-Text Examples
|
||||||
|
|
||||||
|
This directory contains examples demonstrating how to use the speech-to-text integration with wake word detection.
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
1. Make sure you have Docker installed and running
|
||||||
|
2. Build and start the services:
|
||||||
|
```bash
|
||||||
|
docker-compose up -d
|
||||||
|
```
|
||||||
|
|
||||||
|
## Running the Example
|
||||||
|
|
||||||
|
1. Install dependencies:
|
||||||
|
```bash
|
||||||
|
npm install
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Run the example:
|
||||||
|
```bash
|
||||||
|
npm run example:speech
|
||||||
|
```
|
||||||
|
|
||||||
|
Or using `ts-node` directly:
|
||||||
|
```bash
|
||||||
|
npx ts-node examples/speech-to-text-example.ts
|
||||||
|
```
|
||||||
|
|
||||||
|
## Features Demonstrated
|
||||||
|
|
||||||
|
1. **Wake Word Detection**
|
||||||
|
- Listens for wake words: "hey jarvis", "ok google", "alexa"
|
||||||
|
- Automatically saves audio when wake word is detected
|
||||||
|
- Transcribes the detected speech
|
||||||
|
|
||||||
|
2. **Manual Transcription**
|
||||||
|
- Example of how to transcribe audio files manually
|
||||||
|
- Supports different models and configurations
|
||||||
|
|
||||||
|
3. **Event Handling**
|
||||||
|
- Wake word detection events
|
||||||
|
- Transcription results
|
||||||
|
- Progress updates
|
||||||
|
- Error handling
|
||||||
|
|
||||||
|
## Example Output
|
||||||
|
|
||||||
|
When a wake word is detected, you'll see output like this:
|
||||||
|
|
||||||
|
```
|
||||||
|
🎤 Wake word detected!
|
||||||
|
Timestamp: 20240203_123456
|
||||||
|
Audio file: /path/to/audio/wake_word_20240203_123456.wav
|
||||||
|
Metadata file: /path/to/audio/wake_word_20240203_123456.wav.json
|
||||||
|
|
||||||
|
📝 Transcription result:
|
||||||
|
Full text: This is what was said after the wake word.
|
||||||
|
|
||||||
|
Segments:
|
||||||
|
1. [0.00s - 1.52s] (95.5% confidence)
|
||||||
|
"This is what was said"
|
||||||
|
2. [1.52s - 2.34s] (98.2% confidence)
|
||||||
|
"after the wake word."
|
||||||
|
```
|
||||||
|
|
||||||
|
## Customization
|
||||||
|
|
||||||
|
You can customize the behavior by:
|
||||||
|
|
||||||
|
1. Changing the wake word models in `docker/speech/Dockerfile`
|
||||||
|
2. Modifying transcription options in the example file
|
||||||
|
3. Adding your own event handlers
|
||||||
|
4. Implementing different audio processing logic
|
||||||
|
|
||||||
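For point 2 above, a minimal sketch of overriding the transcription options, assuming the `TranscriptionOptions` fields defined in `src/speech/speechToText.ts` (`model`, `language`, `temperature`, `beamSize`, `patience`, `device`); the particular values chosen here are only illustrative.

```typescript
import { SpeechToText } from '../src/speech/speechToText';

// Illustrative option overrides; tune these to your hardware and accuracy needs.
async function transcribeWithCustomOptions(speech: SpeechToText, filepath: string) {
  return speech.transcribeAudio(filepath, {
    model: 'small.en',   // larger than base.en: slower, usually more accurate
    language: 'en',
    temperature: 0.2,    // a little sampling randomness
    beamSize: 10,        // wider beam search
    patience: 2,         // keep exploring candidates longer
    device: 'cuda',      // use GPU acceleration if the container supports it
  });
}
```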
## Troubleshooting

1. **Docker Issues**
   - Make sure Docker is running
   - Check container logs: `docker-compose logs fast-whisper`
   - Verify container is up: `docker ps`

2. **Audio Issues**
   - Check audio device permissions
   - Verify audio file format (WAV files recommended)
   - Check audio file permissions

3. **Performance Issues**
   - Try using a smaller model (tiny.en or base.en)
   - Adjust beam size and patience parameters
   - Consider using GPU acceleration if available
91  examples/speech-to-text-example.ts  Normal file

@@ -0,0 +1,91 @@
import { SpeechToText, TranscriptionResult, WakeWordEvent } from '../src/speech/speechToText';
import path from 'path';

async function main() {
  // Initialize the speech-to-text service
  const speech = new SpeechToText('fast-whisper');

  // Check if the service is available
  const isHealthy = await speech.checkHealth();
  if (!isHealthy) {
    console.error('Speech service is not available. Make sure Docker is running and the fast-whisper container is up.');
    console.error('Run: docker-compose up -d');
    process.exit(1);
  }

  console.log('Speech service is ready!');
  console.log('Listening for wake words: "hey jarvis", "ok google", "alexa"');
  console.log('Press Ctrl+C to exit');

  // Set up event handlers
  speech.on('wake_word', (event: WakeWordEvent) => {
    console.log('\n🎤 Wake word detected!');
    console.log(' Timestamp:', event.timestamp);
    console.log(' Audio file:', event.audioFile);
    console.log(' Metadata file:', event.metadataFile);
  });

  speech.on('transcription', (event: { audioFile: string; result: TranscriptionResult }) => {
    console.log('\n📝 Transcription result:');
    console.log(' Full text:', event.result.text);
    console.log('\n Segments:');
    event.result.segments.forEach((segment, index) => {
      console.log(` ${index + 1}. [${segment.start.toFixed(2)}s - ${segment.end.toFixed(2)}s] (${(segment.confidence * 100).toFixed(1)}% confidence)`);
      console.log(` "${segment.text}"`);
    });
  });

  speech.on('progress', (event: { type: string; data: string }) => {
    if (event.type === 'stderr' && !event.data.includes('Loading model')) {
      console.error('❌ Error:', event.data);
    }
  });

  speech.on('error', (error: Error) => {
    console.error('❌ Error:', error.message);
  });

  // Example of manual transcription
  async function transcribeFile(filepath: string) {
    try {
      console.log(`\n🎯 Manually transcribing: ${filepath}`);
      const result = await speech.transcribeAudio(filepath, {
        model: 'base.en', // You can change this to tiny.en, small.en, medium.en, or large-v2
        language: 'en',
        temperature: 0,
        beamSize: 5
      });

      console.log('\n📝 Transcription result:');
      console.log(' Text:', result.text);
    } catch (error) {
      console.error('❌ Transcription failed:', error instanceof Error ? error.message : error);
    }
  }

  // Create audio directory if it doesn't exist
  const audioDir = path.join(__dirname, '..', 'audio');
  if (!require('fs').existsSync(audioDir)) {
    require('fs').mkdirSync(audioDir, { recursive: true });
  }

  // Start wake word detection
  speech.startWakeWordDetection(audioDir);

  // Example: You can also manually transcribe files
  // Uncomment the following line and replace with your audio file:
  // await transcribeFile('/path/to/your/audio.wav');

  // Keep the process running
  process.on('SIGINT', () => {
    console.log('\nStopping speech service...');
    speech.stopWakeWordDetection();
    process.exit(0);
  });
}

// Run the example
main().catch(error => {
  console.error('Fatal error:', error);
  process.exit(1);
});
@@ -1,4 +1,4 @@
site_name: Project Documentation
site_name: Home Assistant Model Context Protocol (MCP)
site_url: https://yourusername.github.io/your-repo-name/
repo_url: https://github.com/yourusername/your-repo-name

@@ -1,7 +1,7 @@
{
  "name": "homeassistant-mcp",
  "version": "1.0.0",
  "description": "Home Assistant Master Control Program",
  "description": "Home Assistant Model Context Protocol",
  "main": "dist/index.js",
  "type": "module",
  "scripts": {
@@ -21,7 +21,7 @@
    "profile": "bun --inspect src/index.ts",
    "clean": "rm -rf dist .bun coverage",
    "typecheck": "bun x tsc --noEmit",
    "preinstall": "bun install --frozen-lockfile"
    "example:speech": "bun run examples/speech-to-text-example.ts"
  },
  "dependencies": {
    "@elysiajs/cors": "^1.2.0",
@@ -37,6 +37,8 @@
    "node-fetch": "^3.3.2",
    "sanitize-html": "^2.11.0",
    "typescript": "^5.3.3",
    "winston": "^3.11.0",
    "winston-daily-rotate-file": "^5.0.0",
    "ws": "^8.16.0",
    "zod": "^3.22.4"
  },

@@ -24,7 +24,7 @@ config({ path: resolve(process.cwd(), envFile) });
 */
export const AppConfigSchema = z.object({
  /** Server Configuration */
  PORT: z.number().default(4000),
  PORT: z.coerce.number().default(4000),
  NODE_ENV: z
    .enum(["development", "production", "test"])
    .default("development"),
@@ -33,6 +33,21 @@ export const AppConfigSchema = z.object({
  HASS_HOST: z.string().default("http://192.168.178.63:8123"),
  HASS_TOKEN: z.string().optional(),

  /** Speech Features Configuration */
  SPEECH: z.object({
    ENABLED: z.boolean().default(false),
    WAKE_WORD_ENABLED: z.boolean().default(false),
    SPEECH_TO_TEXT_ENABLED: z.boolean().default(false),
    WHISPER_MODEL_PATH: z.string().default("/models"),
    WHISPER_MODEL_TYPE: z.string().default("base"),
  }).default({
    ENABLED: false,
    WAKE_WORD_ENABLED: false,
    SPEECH_TO_TEXT_ENABLED: false,
    WHISPER_MODEL_PATH: "/models",
    WHISPER_MODEL_TYPE: "base",
  }),

  /** Security Configuration */
  JWT_SECRET: z.string().default("your-secret-key"),
  RATE_LIMIT: z.object({
@@ -113,4 +128,11 @@ export const APP_CONFIG = AppConfigSchema.parse({
    LOG_REQUESTS: process.env.LOG_REQUESTS === "true",
  },
  VERSION: "0.1.0",
  SPEECH: {
    ENABLED: process.env.ENABLE_SPEECH_FEATURES === "true",
    WAKE_WORD_ENABLED: process.env.ENABLE_WAKE_WORD === "true",
    SPEECH_TO_TEXT_ENABLED: process.env.ENABLE_SPEECH_TO_TEXT === "true",
    WHISPER_MODEL_PATH: process.env.WHISPER_MODEL_PATH || "/models",
    WHISPER_MODEL_TYPE: process.env.WHISPER_MODEL_TYPE || "base",
  },
});

20  src/index.ts

@@ -25,6 +25,8 @@ import {
  climateCommands,
  type Command,
} from "./commands.js";
import { speechService } from "./speech/index.js";
import { APP_CONFIG } from "./config/app.config.js";

// Load environment variables based on NODE_ENV
const envFile =
@@ -129,8 +131,19 @@ app.get("/health", () => ({
  status: "ok",
  timestamp: new Date().toISOString(),
  version: "0.1.0",
  speech_enabled: APP_CONFIG.SPEECH.ENABLED,
  wake_word_enabled: APP_CONFIG.SPEECH.WAKE_WORD_ENABLED,
  speech_to_text_enabled: APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED,
}));

// Initialize speech service if enabled
if (APP_CONFIG.SPEECH.ENABLED) {
  console.log("Initializing speech service...");
  speechService.initialize().catch((error) => {
    console.error("Failed to initialize speech service:", error);
  });
}

// Create API endpoints for each tool
tools.forEach((tool) => {
  app.post(`/api/tools/${tool.name}`, async ({ body }: { body: Record<string, unknown> }) => {
@@ -145,7 +158,12 @@ app.listen(PORT, () => {
});

// Handle server shutdown
process.on("SIGTERM", () => {
process.on("SIGTERM", async () => {
  console.log("Received SIGTERM. Shutting down gracefully...");
  if (APP_CONFIG.SPEECH.ENABLED) {
    await speechService.shutdown().catch((error) => {
      console.error("Error shutting down speech service:", error);
    });
  }
  process.exit(0);
});

0  src/speech/__tests__/fixtures/test.wav  Normal file

116  src/speech/__tests__/speechToText.test.ts  Normal file

@@ -0,0 +1,116 @@
import { SpeechToText, WakeWordEvent, TranscriptionError } from '../speechToText';
import fs from 'fs';
import path from 'path';

describe('SpeechToText', () => {
  let speechToText: SpeechToText;
  const testAudioDir = path.join(__dirname, 'test_audio');

  beforeEach(() => {
    speechToText = new SpeechToText('fast-whisper');
    // Create test audio directory if it doesn't exist
    if (!fs.existsSync(testAudioDir)) {
      fs.mkdirSync(testAudioDir, { recursive: true });
    }
  });

  afterEach(() => {
    speechToText.stopWakeWordDetection();
    // Clean up test files
    if (fs.existsSync(testAudioDir)) {
      fs.rmSync(testAudioDir, { recursive: true, force: true });
    }
  });

  describe('checkHealth', () => {
    it('should handle Docker not being available', async () => {
      const isHealthy = await speechToText.checkHealth();
      expect(isHealthy).toBeDefined();
      expect(isHealthy).toBe(false);
    });
  });

  describe('wake word detection', () => {
    it('should detect new audio files and emit wake word events', (done) => {
      const testFile = path.join(testAudioDir, 'wake_word_test_123456.wav');
      const testMetadata = `${testFile}.json`;

      speechToText.startWakeWordDetection(testAudioDir);

      speechToText.on('wake_word', (event: WakeWordEvent) => {
        expect(event).toBeDefined();
        expect(event.audioFile).toBe(testFile);
        expect(event.metadataFile).toBe(testMetadata);
        expect(event.timestamp).toBe('123456');
        done();
      });

      // Create a test audio file to trigger the event
      fs.writeFileSync(testFile, 'test audio content');
    }, 1000);

    it('should handle transcription errors when Docker is not available', (done) => {
      const testFile = path.join(testAudioDir, 'wake_word_test_123456.wav');

      let errorEmitted = false;
      let wakeWordEmitted = false;

      const checkDone = () => {
        if (errorEmitted && wakeWordEmitted) {
          done();
        }
      };

      speechToText.on('error', (error) => {
        expect(error).toBeDefined();
        expect(error).toBeInstanceOf(TranscriptionError);
        expect(error.message).toContain('Failed to start Docker process');
        errorEmitted = true;
        checkDone();
      });

      speechToText.on('wake_word', () => {
        wakeWordEmitted = true;
        checkDone();
      });

      speechToText.startWakeWordDetection(testAudioDir);

      // Create a test audio file to trigger the event
      fs.writeFileSync(testFile, 'test audio content');
    }, 1000);
  });

  describe('transcribeAudio', () => {
    it('should handle Docker not being available for transcription', async () => {
      await expect(
        speechToText.transcribeAudio('/audio/test.wav')
      ).rejects.toThrow(TranscriptionError);
    });

    it('should emit progress events on error', (done) => {
      let progressEmitted = false;
      let errorThrown = false;

      const checkDone = () => {
        if (progressEmitted && errorThrown) {
          done();
        }
      };

      speechToText.on('progress', (event: { type: string; data: string }) => {
        expect(event.type).toBe('stderr');
        expect(event.data).toBe('Failed to start Docker process');
        progressEmitted = true;
        checkDone();
      });

      speechToText.transcribeAudio('/audio/test.wav')
        .catch((error) => {
          expect(error).toBeInstanceOf(TranscriptionError);
          errorThrown = true;
          checkDone();
        });
    }, 1000);
  });
});
110  src/speech/index.ts  Normal file

@@ -0,0 +1,110 @@
import { APP_CONFIG } from "../config/app.config.js";
import { logger } from "../utils/logger.js";
import type { IWakeWordDetector, ISpeechToText } from "./types.js";

class SpeechService {
  private static instance: SpeechService | null = null;
  private isInitialized: boolean = false;
  private wakeWordDetector: IWakeWordDetector | null = null;
  private speechToText: ISpeechToText | null = null;

  private constructor() { }

  public static getInstance(): SpeechService {
    if (!SpeechService.instance) {
      SpeechService.instance = new SpeechService();
    }
    return SpeechService.instance;
  }

  public async initialize(): Promise<void> {
    if (this.isInitialized) {
      return;
    }

    if (!APP_CONFIG.SPEECH.ENABLED) {
      logger.info("Speech features are disabled. Skipping initialization.");
      return;
    }

    try {
      // Initialize components based on configuration
      if (APP_CONFIG.SPEECH.WAKE_WORD_ENABLED) {
        logger.info("Initializing wake word detection...");
        // Dynamic import to avoid loading the module if not needed
        const { WakeWordDetector } = await import("./wakeWordDetector.js");
        this.wakeWordDetector = new WakeWordDetector() as IWakeWordDetector;
        await this.wakeWordDetector.initialize();
      }

      if (APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED) {
        logger.info("Initializing speech-to-text...");
        // Dynamic import to avoid loading the module if not needed
        const { SpeechToText } = await import("./speechToText.js");
        this.speechToText = new SpeechToText({
          modelPath: APP_CONFIG.SPEECH.WHISPER_MODEL_PATH,
          modelType: APP_CONFIG.SPEECH.WHISPER_MODEL_TYPE,
        }) as ISpeechToText;
        await this.speechToText.initialize();
      }

      this.isInitialized = true;
      logger.info("Speech service initialized successfully");
    } catch (error) {
      logger.error("Failed to initialize speech service:", error);
      throw error;
    }
  }

  public async shutdown(): Promise<void> {
    if (!this.isInitialized) {
      return;
    }

    try {
      if (this.wakeWordDetector) {
        await this.wakeWordDetector.shutdown();
        this.wakeWordDetector = null;
      }

      if (this.speechToText) {
        await this.speechToText.shutdown();
        this.speechToText = null;
      }

      this.isInitialized = false;
      logger.info("Speech service shut down successfully");
    } catch (error) {
      logger.error("Error during speech service shutdown:", error);
      throw error;
    }
  }

  public isEnabled(): boolean {
    return APP_CONFIG.SPEECH.ENABLED;
  }

  public isWakeWordEnabled(): boolean {
    return APP_CONFIG.SPEECH.WAKE_WORD_ENABLED;
  }

  public isSpeechToTextEnabled(): boolean {
    return APP_CONFIG.SPEECH.SPEECH_TO_TEXT_ENABLED;
  }

  public getWakeWordDetector(): IWakeWordDetector {
    if (!this.isInitialized || !this.wakeWordDetector) {
      throw new Error("Wake word detector is not initialized");
    }
    return this.wakeWordDetector;
  }

  public getSpeechToText(): ISpeechToText {
    if (!this.isInitialized || !this.speechToText) {
      throw new Error("Speech-to-text is not initialized");
    }
    return this.speechToText;
  }
}

export const speechService = SpeechService.getInstance();
247  src/speech/speechToText.ts  Normal file

@@ -0,0 +1,247 @@
import { spawn } from 'child_process';
import { EventEmitter } from 'events';
import { watch } from 'fs';
import path from 'path';
import { ISpeechToText, SpeechToTextConfig } from "./types.js";

export interface TranscriptionOptions {
  model?: 'tiny.en' | 'base.en' | 'small.en' | 'medium.en' | 'large-v2';
  language?: string;
  temperature?: number;
  beamSize?: number;
  patience?: number;
  device?: 'cpu' | 'cuda';
}

export interface TranscriptionResult {
  text: string;
  segments: Array<{
    text: string;
    start: number;
    end: number;
    confidence: number;
  }>;
}

export interface WakeWordEvent {
  timestamp: string;
  audioFile: string;
  metadataFile: string;
}

export class TranscriptionError extends Error {
  constructor(message: string) {
    super(message);
    this.name = 'TranscriptionError';
  }
}

export class SpeechToText extends EventEmitter implements ISpeechToText {
  private containerName: string;
  private audioWatcher?: ReturnType<typeof watch>;
  private modelPath: string;
  private modelType: string;
  private isInitialized: boolean = false;

  constructor(config: SpeechToTextConfig) {
    super();
    this.containerName = config.containerName || 'fast-whisper';
    this.modelPath = config.modelPath;
    this.modelType = config.modelType;
  }

  public async initialize(): Promise<void> {
    if (this.isInitialized) {
      return;
    }
    try {
      // Initialization logic will be implemented here
      await this.setupContainer();
      this.isInitialized = true;
      this.emit('ready');
    } catch (error) {
      this.emit('error', error);
      throw error;
    }
  }

  public async shutdown(): Promise<void> {
    if (!this.isInitialized) {
      return;
    }
    try {
      // Cleanup logic will be implemented here
      await this.cleanupContainer();
      this.isInitialized = false;
      this.emit('shutdown');
    } catch (error) {
      this.emit('error', error);
      throw error;
    }
  }

  public async transcribe(audioData: Buffer): Promise<string> {
    if (!this.isInitialized) {
      throw new Error("Speech-to-text service is not initialized");
    }
    try {
      // Transcription logic will be implemented here
      this.emit('transcribing');
      const result = await this.processAudio(audioData);
      this.emit('transcribed', result);
      return result;
    } catch (error) {
      this.emit('error', error);
      throw error;
    }
  }

  private async setupContainer(): Promise<void> {
    // Container setup logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }

  private async cleanupContainer(): Promise<void> {
    // Container cleanup logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }

  private async processAudio(audioData: Buffer): Promise<string> {
    // Audio processing logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
    return "Transcription placeholder";
  }

  startWakeWordDetection(audioDir: string = './audio'): void {
    // Watch for new audio files from wake word detection
    this.audioWatcher = watch(audioDir, (eventType, filename) => {
      if (eventType === 'rename' && filename && filename.startsWith('wake_word_') && filename.endsWith('.wav')) {
        const audioFile = path.join(audioDir, filename);
        const metadataFile = `${audioFile}.json`;
        const parts = filename.split('_');
        const timestamp = parts[parts.length - 1].split('.')[0];

        // Emit wake word event
        this.emit('wake_word', {
          timestamp,
          audioFile,
          metadataFile
        } as WakeWordEvent);

        // Automatically transcribe the wake word audio
        this.transcribeAudio(audioFile)
          .then(result => {
            this.emit('transcription', { audioFile, result });
          })
          .catch(error => {
            this.emit('error', error);
          });
      }
    });
  }

  stopWakeWordDetection(): void {
    if (this.audioWatcher) {
      this.audioWatcher.close();
      this.audioWatcher = undefined;
    }
  }

  async transcribeAudio(
    audioFilePath: string,
    options: TranscriptionOptions = {}
  ): Promise<TranscriptionResult> {
    const {
      model = 'base.en',
      language = 'en',
      temperature = 0,
      beamSize = 5,
      patience = 1,
      device = 'cpu'
    } = options;

    return new Promise((resolve, reject) => {
      const args = [
        'exec',
        this.containerName,
        'fast-whisper',
        '--model', model,
        '--language', language,
        '--temperature', temperature.toString(),
        '--beam-size', beamSize.toString(),
        '--patience', patience.toString(),
        '--device', device,
        '--output-json',
        audioFilePath
      ];

      let process;
      try {
        process = spawn('docker', args);
      } catch (error) {
        this.emit('progress', { type: 'stderr', data: 'Failed to start Docker process' });
        reject(new TranscriptionError('Failed to start Docker process'));
        return;
      }

      let stdout = '';
      let stderr = '';

      process.stdout?.on('data', (data: Buffer) => {
        stdout += data.toString();
        this.emit('progress', { type: 'stdout', data: data.toString() });
      });

      process.stderr?.on('data', (data: Buffer) => {
        stderr += data.toString();
        this.emit('progress', { type: 'stderr', data: data.toString() });
      });

      process.on('error', (error: Error) => {
        this.emit('progress', { type: 'stderr', data: error.message });
        reject(new TranscriptionError(`Failed to execute Docker command: ${error.message}`));
      });

      process.on('close', (code: number) => {
        if (code !== 0) {
          reject(new TranscriptionError(`Transcription failed: ${stderr}`));
          return;
        }

        try {
          const result = JSON.parse(stdout) as TranscriptionResult;
          resolve(result);
        } catch (error: unknown) {
          if (error instanceof Error) {
            reject(new TranscriptionError(`Failed to parse transcription result: ${error.message}`));
          } else {
            reject(new TranscriptionError('Failed to parse transcription result: Unknown error'));
          }
        }
      });
    });
  }

  async checkHealth(): Promise<boolean> {
    try {
      const process = spawn('docker', ['ps', '--filter', `name=${this.containerName}`, '--format', '{{.Status}}']);

      return new Promise((resolve) => {
        let output = '';
        process.stdout?.on('data', (data: Buffer) => {
          output += data.toString();
        });

        process.on('error', () => {
          resolve(false);
        });

        process.on('close', (code: number) => {
          resolve(code === 0 && output.toLowerCase().includes('up'));
        });
      });
    } catch (error) {
      return false;
    }
  }
}
20  src/speech/types.ts  Normal file

@@ -0,0 +1,20 @@
import { EventEmitter } from "events";

export interface IWakeWordDetector {
  initialize(): Promise<void>;
  shutdown(): Promise<void>;
  startListening(): Promise<void>;
  stopListening(): Promise<void>;
}

export interface ISpeechToText extends EventEmitter {
  initialize(): Promise<void>;
  shutdown(): Promise<void>;
  transcribe(audioData: Buffer): Promise<string>;
}

export interface SpeechToTextConfig {
  modelPath: string;
  modelType: string;
  containerName?: string;
}
64  src/speech/wakeWordDetector.ts  Normal file

@@ -0,0 +1,64 @@
import { IWakeWordDetector } from "./types.js";

export class WakeWordDetector implements IWakeWordDetector {
  private isListening: boolean = false;
  private isInitialized: boolean = false;

  public async initialize(): Promise<void> {
    if (this.isInitialized) {
      return;
    }
    // Initialization logic will be implemented here
    await this.setupDetector();
    this.isInitialized = true;
  }

  public async shutdown(): Promise<void> {
    if (this.isListening) {
      await this.stopListening();
    }
    if (this.isInitialized) {
      await this.cleanupDetector();
      this.isInitialized = false;
    }
  }

  public async startListening(): Promise<void> {
    if (!this.isInitialized) {
      throw new Error("Wake word detector is not initialized");
    }
    if (this.isListening) {
      return;
    }
    await this.startDetection();
    this.isListening = true;
  }

  public async stopListening(): Promise<void> {
    if (!this.isListening) {
      return;
    }
    await this.stopDetection();
    this.isListening = false;
  }

  private async setupDetector(): Promise<void> {
    // Setup logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }

  private async cleanupDetector(): Promise<void> {
    // Cleanup logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }

  private async startDetection(): Promise<void> {
    // Start detection logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }

  private async stopDetection(): Promise<void> {
    // Stop detection logic will be implemented here
    await new Promise(resolve => setTimeout(resolve, 100)); // Placeholder
  }
}