docker compose unification, small changes

Signed-off-by: mudler <mudler@localai.io>
Author: mudler
Date: 2025-04-12 18:17:43 +02:00
Parent: 4858f85ade
Commit: 0ac5c13c4d
6 changed files with 161 additions and 175 deletions
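The unification relies on Compose profiles: the nvidia- and intel-specific services carry profiles: ["nvidia"] / ["intel"], so Compose only creates them when that profile is explicitly enabled, while services without a profile start on every invocation. A minimal usage sketch, assuming Docker Compose v2 and the docker-compose.yaml touched by this commit:

# Default stack: profile-gated services are skipped
docker compose up -d

# Nvidia stack: also starts localai-nvidia and localagi-nvidia
docker compose --profile nvidia up -d

# Intel/SYCL stack: also starts localai-intel and localagi-intel
docker compose --profile intel up -d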


@@ -24,14 +24,44 @@ services:
- ./volumes/models:/build/models:cached
- ./volumes/images:/tmp/generated/images
# decomment the following piece if running with Nvidia GPUs
# deploy:
# resources:
# reservations:
# devices:
# - driver: nvidia
# count: 1
# capabilities: [gpu]
localai-nvidia:
profiles: ["nvidia"]
extends:
service: localai
environment:
- LOCALAI_SINGLE_ACTIVE_BACKEND=true
- DEBUG=true
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: 1
capabilities: [gpu]
command:
- ${MODEL_NAME:-openthinker-7b}
- ${MULTIMODAL_MODEL:-minicpm-v-2_6}
- ${IMAGE_MODEL:-flux.1-dev}
- granite-embedding-107m-multilingual
localai-intel:
profiles: ["intel"]
environment:
- LOCALAI_SINGLE_ACTIVE_BACKEND=true
- DEBUG=true
extends:
service: localai
image: localai/localai:master-sycl-f32-ffmpeg-core
devices:
# On a system with an integrated GPU and an Arc 770, these devices are the Arc 770
- /dev/dri/card1
- /dev/dri/renderD129
command:
- ${MODEL_NAME:-openthinker-7b}
- ${MULTIMODAL_MODEL:-minicpm-v-2_6}
- ${IMAGE_MODEL:-sd-1.5-ggml}
- granite-embedding-107m-multilingual
localrecall:
image: quay.io/mudler/localrecall:main
ports:
@@ -77,3 +107,31 @@ services:
- "host.docker.internal:host-gateway"
volumes:
- ./volumes/localagi/:/pool
localagi-nvidia:
profiles: ["nvidia"]
extends:
service: localagi
environment:
- LOCALAGI_MODEL=${MODEL_NAME:-openthinker-7b}
- LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
- LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-flux.1-dev}
- LOCALAGI_LLM_API_URL=http://localai:8080
- LOCALAGI_LOCALRAG_URL=http://localrecall:8080
- LOCALAGI_STATE_DIR=/pool
- LOCALAGI_TIMEOUT=5m
- LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
localagi-intel:
profiles: ["intel"]
extends:
service: localagi
environment:
- LOCALAGI_MODEL=${MODEL_NAME:-openthinker-7b}
- LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
- LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
- LOCALAGI_LLM_API_URL=http://localai:8080
- LOCALAGI_LOCALRAG_URL=http://localrecall:8080
- LOCALAGI_STATE_DIR=/pool
- LOCALAGI_TIMEOUT=5m
- LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
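Each hardware variant uses extends to inherit the full definition of its base service (localai / localagi) and only layers on the hardware-specific pieces: the Nvidia deploy reservation or the /dev/dri device mappings, the SYCL image, and the per-hardware default models. The model names are plain ${VAR:-default} substitutions, so they can be changed without editing the file. A sketch of how this could be exercised, assuming Docker Compose v2 and the service and variable names shown in the diff above (the override value is a placeholder):

# Inspect the fully merged definition of a profile-gated service
docker compose --profile nvidia config localai-nvidia

# Override the default model at launch time, from the shell or a .env file
MODEL_NAME=my-other-model docker compose --profile intel up -d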