Back at arcee-agent as default

Signed-off-by: mudler <mudler@localai.io>
This commit is contained in:
mudler
2025-04-12 18:46:17 +02:00
parent 289a6ce4c8
commit 7fca3620f6
3 changed files with 10 additions and 10 deletions

View File

@@ -9,7 +9,7 @@ cleanup-tests:
 	docker compose down
 tests: prepare-tests
-	LOCALAGI_MODEL="gemma-3-4b-it" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
+	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
 run-nokb:
 	$(MAKE) run KBDISABLEINDEX=true

View File

@@ -83,7 +83,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose --profile nvidia up`
 - Default models:
-  - Text: `gemma-3-4b-it`
+  - Text: `arcee-agent`
   - Multimodal: `minicpm-v-2_6`
   - Image: `flux.1-dev`
 - Environment variables:
@@ -99,7 +99,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose --profile intel up`
 - Default models:
-  - Text: `gemma-3-4b-it`
+  - Text: `arcee-agent`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -130,7 +130,7 @@ docker compose --profile intel up
 ```
 If no models are specified, it will use the defaults:
-- Text model: `gemma-3-4b-it`
+- Text model: `arcee-agent`
 - Multimodal model: `minicpm-v-2_6`
 - Image model: `flux.1-dev` (NVIDIA) or `sd-1.5-ggml` (Intel)

View File

@@ -8,7 +8,7 @@ services:
     image: localai/localai:master-ffmpeg-core
     command:
       # - gemma-3-12b-it
-      - ${MODEL_NAME:-gemma-3-4b-it}
+      - ${MODEL_NAME:-arcee-agent}
       - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
@@ -39,7 +39,7 @@ services:
               count: 1
               capabilities: [gpu]
     command:
-      - ${MODEL_NAME:-gemma-3-4b-it}
+      - ${MODEL_NAME:-arcee-agent}
       - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - ${IMAGE_MODEL:-flux.1-dev}
       - granite-embedding-107m-multilingual
@@ -57,7 +57,7 @@ services:
       - /dev/dri/card1
       - /dev/dri/renderD129
     command:
-      - ${MODEL_NAME:-gemma-3-4b-it}
+      - ${MODEL_NAME:-arcee-agent}
       - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - ${IMAGE_MODEL:-sd-1.5-ggml}
       - granite-embedding-107m-multilingual
@@ -96,7 +96,7 @@ services:
       - 8080:3000
     #image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-4b-it}
+      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
       - LOCALAGI_LLM_API_URL=http://localai:8080
       #- LOCALAGI_LLM_API_KEY=sk-1234567890
       - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
@@ -113,7 +113,7 @@ services:
     extends:
       service: localagi
     environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-4b-it}
+      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
       - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-flux.1-dev}
       - LOCALAGI_LLM_API_URL=http://localai:8080
@@ -127,7 +127,7 @@ services:
     extends:
       service: localagi
     environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-4b-it}
+      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
       - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
       - LOCALAGI_LLM_API_URL=http://localai:8080