Compare commits

2 Commits: release/v2...chore/gemm

| Author | SHA1 | Date |
|---|---|---|
|  | 69f047ed62 |  |
|  | ef90ecd812 |  |

Makefile:
```diff
@@ -9,7 +9,7 @@ cleanup-tests:
 	docker compose down
 
 tests: prepare-tests
-	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
+	LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
 
 run-nokb:
 	$(MAKE) run KBDISABLEINDEX=true
```
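Note that the model variables are assigned inline on the command within the recipe, so `make tests` always runs against the model named here; an inline assignment like this takes precedence over any value exported in the caller's environment.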
Documentation:

````diff
@@ -114,7 +114,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profiles
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.nvidia.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -130,7 +130,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profiles
 - Supports text, multimodal, and image generation models
 - Run with: `docker compose -f docker-compose.intel.yaml up`
 - Default models:
-  - Text: `arcee-agent`
+  - Text: `gemma-3-12b-it-qat`
   - Multimodal: `minicpm-v-2_6`
   - Image: `sd-1.5-ggml`
 - Environment variables:
@@ -161,7 +161,7 @@ docker compose -f docker-compose.intel.yaml up
 ```
 
 If no models are specified, it will use the defaults:
-- Text model: `arcee-agent`
+- Text model: `gemma-3-12b-it-qat`
 - Multimodal model: `minicpm-v-2_6`
 - Image model: `sd-1.5-ggml`
 
````
Agent test suite:

```diff
@@ -226,7 +226,10 @@ var _ = Describe("Agent test", func() {
 			WithLLMAPIKey(apiKeyURL),
 			WithTimeout("10m"),
 			WithActions(
-				actions.NewSearch(map[string]string{}),
+				&TestAction{response: map[string]string{
+					"boston": testActionResult,
+					"milan":  testActionResult2,
+				}},
 			),
 			EnablePlanning,
 			EnableForceReasoning,
@@ -238,18 +241,21 @@ var _ = Describe("Agent test", func() {
 		defer agent.Stop()
 
 		result := agent.Ask(
-			types.WithText("Thoroughly plan a trip to San Francisco from Venice, Italy; check flight times, visa requirements and whether electrical items are allowed in cabin luggage."),
+			types.WithText("Use the plan tool to do two actions in sequence: search for the weather in boston and search for the weather in milan"),
 		)
 		Expect(len(result.State)).To(BeNumerically(">", 1))
 
 		actionsExecuted := []string{}
+		actionResults := []string{}
 		for _, r := range result.State {
 			xlog.Info(r.Result)
 			actionsExecuted = append(actionsExecuted, r.Action.Definition().Name.String())
+			actionResults = append(actionResults, r.ActionResult.Result)
 		}
-		Expect(actionsExecuted).To(ContainElement("search_internet"), fmt.Sprint(result))
+		Expect(actionsExecuted).To(ContainElement("get_weather"), fmt.Sprint(result))
 		Expect(actionsExecuted).To(ContainElement("plan"), fmt.Sprint(result))
+		Expect(actionResults).To(ContainElement(testActionResult), fmt.Sprint(result))
+		Expect(actionResults).To(ContainElement(testActionResult2), fmt.Sprint(result))
 	})
 
 	It("Can initiate conversations", func() {
```
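The `TestAction` stub referenced in `WithActions` is defined elsewhere in the test file and does not appear in these hunks. Below is a minimal sketch of what such a stub could look like; the `Action`, `ActionParams`, `ActionDefinition`, and `ActionResult` types here are simplified stand-ins assumed for illustration, not LocalAGI's actual interfaces, and the `"get_weather"` name matches what the assertions above expect.

```go
package main

import (
	"context"
	"fmt"
)

// Simplified stand-ins for the framework types; assumptions for
// illustration only, not LocalAGI's real API.
type ActionParams map[string]string

type ActionResult struct {
	Result string
}

type ActionName string

func (n ActionName) String() string { return string(n) }

type ActionDefinition struct {
	Name ActionName
}

type Action interface {
	Definition() ActionDefinition
	Run(ctx context.Context, params ActionParams) (ActionResult, error)
}

// TestAction is a deterministic stub: it maps a city name to a canned
// result so the test can assert on exact output instead of live data.
type TestAction struct {
	response map[string]string // city -> canned result
}

func (t *TestAction) Definition() ActionDefinition {
	// The test asserts ContainElement("get_weather"), so the stub
	// reports that name.
	return ActionDefinition{Name: "get_weather"}
}

func (t *TestAction) Run(ctx context.Context, params ActionParams) (ActionResult, error) {
	r, ok := t.response[params["city"]]
	if !ok {
		return ActionResult{}, fmt.Errorf("no canned response for %q", params["city"])
	}
	return ActionResult{Result: r}, nil
}

func main() {
	var a Action = &TestAction{response: map[string]string{"boston": "sunny, 20C"}}
	res, _ := a.Run(context.Background(), ActionParams{"city": "boston"})
	fmt.Println(a.Definition().Name.String(), "->", res.Result)
}
```

Swapping the live `search_internet` action for a deterministic stub makes the planning assertions stable: the test can verify that both canned results surface in `result.State` without depending on network access or real search output.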
Docker Compose configuration:

```diff
@@ -7,7 +7,7 @@ services:
       # Image list (dockerhub): https://hub.docker.com/r/localai/localai
       image: localai/localai:master-ffmpeg-core
       command:
-        - ${MODEL_NAME:-arcee-agent}
+        - ${MODEL_NAME:-gemma-3-12b-it-qat}
         - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
         - ${IMAGE_MODEL:-sd-1.5-ggml}
         - granite-embedding-107m-multilingual
@@ -59,7 +59,7 @@ services:
       - 8080:3000
       #image: quay.io/mudler/localagi:master
       environment:
-        - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
+        - LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-12b-it-qat}
         - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
         - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
         - LOCALAGI_LLM_API_URL=http://localai:8080
```
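Because the compose files read the model names through `${VAR:-default}` substitution, the values above are only fallbacks: setting `MODEL_NAME` in the environment or in an `.env` file before running `docker compose up` still overrides the new default.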