diff --git a/Makefile b/Makefile
index cb71f78..6edd743 100644
--- a/Makefile
+++ b/Makefile
@@ -9,7 +9,7 @@ cleanup-tests:
 	docker compose down
 
 tests: prepare-tests
-	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
+	LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
 
 run-nokb:
 	$(MAKE) run KBDISABLEINDEX=true
diff --git a/README.md b/README.md
index eff412b..7af1471 100644
--- a/README.md
+++ b/README.md
@@ -114,7 +114,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.nvidia.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -130,7 +130,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.intel.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -161,7 +161,7 @@ docker compose -f docker-compose.intel.yaml up
 ```
 
 If no models are specified, it will use the defaults:
-- Text model: `arcee-agent`
+- Text model: `gemma-3-12b-it-qat`
 - Multimodal model: `minicpm-v-2_6`
 - Image model: `sd-1.5-ggml`
 
diff --git a/core/agent/agent_test.go b/core/agent/agent_test.go
index 2f06c30..0b80945 100644
--- a/core/agent/agent_test.go
+++ b/core/agent/agent_test.go
@@ -226,7 +226,10 @@ var _ = Describe("Agent test", func() {
 			WithLLMAPIKey(apiKeyURL),
 			WithTimeout("10m"),
 			WithActions(
-				actions.NewSearch(map[string]string{}),
+				&TestAction{response: map[string]string{
+					"boston": testActionResult,
+					"milan":  testActionResult2,
+				}},
 			),
 			EnablePlanning,
 			EnableForceReasoning,
@@ -238,18 +241,21 @@ var _ = Describe("Agent test", func() {
 		defer agent.Stop()
 
 		result := agent.Ask(
-			types.WithText("Thoroughly plan a trip to San Francisco from Venice, Italy; check flight times, visa requirements and whether electrical items are allowed in cabin luggage."),
+			types.WithText("Use the plan tool to do two actions in sequence: search for the weather in boston and search for the weather in milan"),
 		)
 		Expect(len(result.State)).To(BeNumerically(">", 1))
 		actionsExecuted := []string{}
+		actionResults := []string{}
 		for _, r := range result.State {
 			xlog.Info(r.Result)
 			actionsExecuted = append(actionsExecuted, r.Action.Definition().Name.String())
+			actionResults = append(actionResults, r.ActionResult.Result)
 		}
-		Expect(actionsExecuted).To(ContainElement("search_internet"), fmt.Sprint(result))
+		Expect(actionsExecuted).To(ContainElement("get_weather"), fmt.Sprint(result))
 		Expect(actionsExecuted).To(ContainElement("plan"), fmt.Sprint(result))
-
+		Expect(actionResults).To(ContainElement(testActionResult), fmt.Sprint(result))
+		Expect(actionResults).To(ContainElement(testActionResult2), fmt.Sprint(result))
 	})
 
 	It("Can initiate conversations", func() {
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 779a352..ba10c72 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -7,7 +7,7 @@ services:
     # Image list (dockerhub): https://hub.docker.com/r/localai/localai
     image: localai/localai:master-ffmpeg-core
     command:
-      - ${MODEL_NAME:-arcee-agent}
+      - ${MODEL_NAME:-gemma-3-12b-it-qat}
       - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - ${IMAGE_MODEL:-sd-1.5-ggml}
       - granite-embedding-107m-multilingual
@@ -59,7 +59,7 @@ services:
     ports:
      - 8080:3000
     #image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
+      - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-12b-it-qat}
      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
      - LOCALAGI_LLM_API_URL=http://localai:8080
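
For context: the rewritten test swaps the real `actions.NewSearch` for a `TestAction` stub with canned responses, which is defined elsewhere in `agent_test.go` and not shown in this diff. Below is a minimal sketch of what such a stub could look like. Only the `get_weather` name, the `response` city-to-result map, and the existence of `Definition()`/`testActionResult` are implied by the diff; the `ActionDefinition` stand-in type, the `Run` signature, and the `city` parameter are assumptions, not LocalAGI's actual API.

```go
package agent_test

import (
	"fmt"
	"strings"
)

// ActionDefinition is a stand-in for the framework's definition type;
// the real one has at least a Name usable via Name.String().
type ActionDefinition struct {
	Name        string
	Description string
}

// TestAction returns canned results keyed by city, letting the test
// assert that both planned steps ran and surfaced their results
// without hitting a real search backend.
type TestAction struct {
	// response maps a lowercase city name to a fixed result string,
	// e.g. "boston" -> testActionResult, "milan" -> testActionResult2.
	response map[string]string
}

// Definition advertises the action under the name the test asserts on.
func (a *TestAction) Definition() ActionDefinition {
	return ActionDefinition{
		Name:        "get_weather",
		Description: "Returns a canned weather report for a known city",
	}
}

// Run looks up the canned result for the requested city (hypothetical
// signature; the real action interface may differ).
func (a *TestAction) Run(params map[string]any) (string, error) {
	city, _ := params["city"].(string)
	if result, ok := a.response[strings.ToLower(city)]; ok {
		return result, nil
	}
	return "", fmt.Errorf("no canned response for city %q", city)
}
```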