hass-addons/localai-p2p-worker/run.sh
#!/command/with-contenv bashio
# shellcheck shell=bash
# ==============================================================================
# LocalAI P2P Worker - Home Assistant Add-on
# ==============================================================================
bashio::log.info "Starting LocalAI P2P Worker"
# Read the Home Assistant add-on configuration
TOKEN="$(bashio::config 'master_token')"
GPU_LAYERS="$(bashio::config 'gpu_layers')"
DEBUG="$(bashio::config 'debug')"
MODELS_PATH="$(bashio::config 'models_path')"
THREADS="$(bashio::config 'threads')"
bashio::log.info "Configuration loaded:"
bashio::log.info "- Master Token: ${TOKEN:0:20}..."
bashio::log.info "- GPU Layers: ${GPU_LAYERS}"
bashio::log.info "- Debug: ${DEBUG}"
bashio::log.info "- Models Path: ${MODELS_PATH}"
bashio::log.info "- Threads: ${THREADS}"
# Configure the P2P token if one was provided
if bashio::var.has_value "${TOKEN}"; then
    export LOCALAI_P2P_TOKEN="${TOKEN}"
    export P2P_TOKEN="${TOKEN}"
    bashio::log.info "P2P Token configured"
else
    bashio::log.warning "No P2P token provided - one will be generated"
fi
# Create the models directory
bashio::log.info "Creating models directory: ${MODELS_PATH}"
mkdir -p "${MODELS_PATH}"
# Remove corrupted model config files: YAML files that accidentally contain
# script content (the with-contenv shebang) or a YAML parser error message
bashio::log.info "Cleaning corrupted files..."
find "${MODELS_PATH}" -name "*.yaml" \
    -exec grep -l "#!/usr/bin/with-contenv bashio\|mapping values are not allowed in this context" {} \; 2>/dev/null \
    | xargs rm -f 2>/dev/null || true
# Export threading environment variables
export THREADS="${THREADS}"
export OMP_NUM_THREADS="${THREADS}"
# Enable debug output if requested
if bashio::var.true "${DEBUG}"; then
    export LOCALAI_DEBUG="true"
    export LOCALAI_LOG_LEVEL="debug"
    bashio::log.info "Debug mode enabled"
fi
# Configure GPU offloading if a layer count was specified
# (guard against an empty value before the numeric comparison)
if bashio::var.has_value "${GPU_LAYERS}" && [ "${GPU_LAYERS}" -gt 0 ]; then
    export LOCALAI_GPU_LAYERS="${GPU_LAYERS}"
    bashio::log.info "GPU layers configured: ${GPU_LAYERS}"
fi
# Launch LocalAI
bashio::log.info "Starting LocalAI with models path: ${MODELS_PATH}"
exec /usr/local/bin/local-ai run \
    --models-path="${MODELS_PATH}" \
    --threads="${THREADS}" \
    --address="0.0.0.0:8080" \
    --cors \
    --cors-allow-origins="*"
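# Once the worker is up, its OpenAI-compatible HTTP API can be sanity-checked
# from another host on the network, e.g. (the add-on's IP/hostname is an
# assumption; the port matches the --address flag above):
#
#   curl http://<addon-ip>:8080/v1/models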