diff --git a/Makefile b/Makefile
index cb71f78..6edd743 100644
--- a/Makefile
+++ b/Makefile
@@ -9,7 +9,7 @@ cleanup-tests:
 	docker compose down
 
 tests: prepare-tests
-	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
+	LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
 
 run-nokb:
 	$(MAKE) run KBDISABLEINDEX=true
diff --git a/README.md b/README.md
index eff412b..7af1471 100644
--- a/README.md
+++ b/README.md
@@ -114,7 +114,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.nvidia.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -130,7 +130,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.intel.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -161,7 +161,7 @@ docker compose -f docker-compose.intel.yaml up
 ```
 
 If no models are specified, it will use the defaults:
-- Text model: `arcee-agent`
+- Text model: `gemma-3-12b-it-qat`
 - Multimodal model: `minicpm-v-2_6`
 - Image model: `sd-1.5-ggml`
 
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 779a352..ba10c72 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -7,7 +7,7 @@ services:
     # Image list (dockerhub): https://hub.docker.com/r/localai/localai
     image: localai/localai:master-ffmpeg-core
     command:
-      - ${MODEL_NAME:-arcee-agent}
+      - ${MODEL_NAME:-gemma-3-12b-it-qat}
       - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - ${IMAGE_MODEL:-sd-1.5-ggml}
       - granite-embedding-107m-multilingual
@@ -59,7 +59,7 @@ services:
       - 8080:3000
     #image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
+      - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-12b-it-qat}
      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
      - LOCALAGI_LLM_API_URL=http://localai:8080