From 471ccb1589fd0ce937248d0ecfef9acf990bdcf6 Mon Sep 17 00:00:00 2001
From: alex
Date: Fri, 30 May 2025 15:48:39 +0200
Subject: [PATCH] localai-p2p-worker: add gpu_layers and debug options (1.0.4.5)

---
 localai-p2p-worker/config.yaml |  2 +-
 localai-p2p-worker/run.sh      | 22 ++++++++++++++++++++++
 2 files changed, 23 insertions(+), 1 deletion(-)

diff --git a/localai-p2p-worker/config.yaml b/localai-p2p-worker/config.yaml
index 215ebca..d2322cc 100644
--- a/localai-p2p-worker/config.yaml
+++ b/localai-p2p-worker/config.yaml
@@ -1,5 +1,5 @@
 name: "LocalAI P2P Worker"
-version: "1.0.4.4"
+version: "1.0.4.5"
 slug: "localai-p2p-worker"
 description: "LocalAI P2P federation worker node"
 arch:
diff --git a/localai-p2p-worker/run.sh b/localai-p2p-worker/run.sh
index 03ddcd3..b043e38 100644
--- a/localai-p2p-worker/run.sh
+++ b/localai-p2p-worker/run.sh
@@ -1,19 +1,28 @@
 #!/bin/bash
 
+# Source bashio to use the Home Assistant configuration helpers
+source /usr/lib/bashio/bashio
+
 echo "Starting LocalAI P2P Worker"
 
 # Read the Home Assistant configuration
 if [ -f /data/options.json ]; then
     TOKEN="$(bashio::config 'master_token' 2>/dev/null || echo '')"
+    GPU_LAYERS="$(bashio::config 'gpu_layers' 2>/dev/null || echo '0')"
+    DEBUG="$(bashio::config 'debug' 2>/dev/null || echo 'false')"
     MODELS_PATH="$(bashio::config 'models_path' 2>/dev/null || echo '/share/localai/models')"
     THREADS="$(bashio::config 'threads' 2>/dev/null || echo '8')"
 
     echo "Master Token: $TOKEN"
+    echo "GPU Layers: $GPU_LAYERS"
+    echo "Debug: $DEBUG"
     echo "Models Path: $MODELS_PATH"
     echo "Threads: $THREADS"
 else
     echo "No Home Assistant config found, using defaults"
     TOKEN=""
+    GPU_LAYERS="0"
+    DEBUG="false"
     MODELS_PATH="/share/localai/models"
     THREADS="8"
 fi
@@ -37,6 +46,19 @@ find "$MODELS_PATH" -name "*.yaml" -exec grep -l "#!/usr/bin/with-contenv bashio
 export THREADS="$THREADS"
 export OMP_NUM_THREADS="$THREADS"
 
+# Enable debug logging if requested
+if [ "$DEBUG" = "true" ]; then
+    export LOCALAI_DEBUG="true"
+    export LOCALAI_LOG_LEVEL="debug"
+    echo "Debug mode enabled"
+fi
+
+# Configure GPU layers if specified
+if [ "$GPU_LAYERS" -gt 0 ]; then
+    export LOCALAI_GPU_LAYERS="$GPU_LAYERS"
+    echo "GPU layers configured: $GPU_LAYERS"
+fi
+
 # Launch LocalAI
 echo "Starting LocalAI with models path: $MODELS_PATH"
 exec /build/local-ai run \
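
Side note on the new options: run.sh now reads 'gpu_layers' and 'debug' through bashio::config, but the config.yaml hunk above only bumps the version. bashio::config pulls values from /data/options.json, which the Supervisor generates from the add-on's options/schema block, so the new keys would also have to be declared there to be user-configurable. A minimal sketch of what that block might look like; the key names are taken from run.sh, while the defaults, types, and the pre-existing entries (master_token, models_path, threads) are assumptions based on the fallbacks used in run.sh:

options:
  master_token: ""                       # assumed default; run.sh falls back to an empty token
  gpu_layers: 0                          # new key read by run.sh; 0 leaves GPU offloading disabled
  debug: false                           # new key read by run.sh; enables LOCALAI_DEBUG and debug log level
  models_path: "/share/localai/models"   # matches the run.sh fallback
  threads: 8                             # matches the run.sh fallback
schema:
  master_token: str?
  gpu_layers: int
  debug: bool
  models_path: str
  threads: int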