Makefile
@@ -9,7 +9,7 @@ cleanup-tests:
 	docker compose down

 tests: prepare-tests
-	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
+	LOCALAGI_MODEL="gemma-3-4b-it" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...

 run-nokb:
 	$(MAKE) run KBDISABLEINDEX=true
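
For reference, a minimal Go sketch of how a test suite could pick up the three variables the tests target sets (LOCALAGI_MODEL, LOCALAI_API_URL, LOCALAGI_API_URL). The getenvOr helper and the defaults are illustrative assumptions, not the repository's actual test wiring:

package main

import (
	"fmt"
	"os"
)

// getenvOr is a hypothetical helper: it returns the value of key,
// or fallback when the variable is unset or empty.
func getenvOr(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

func main() {
	// Defaults mirror the values hard-coded in the tests target above.
	model := getenvOr("LOCALAGI_MODEL", "gemma-3-4b-it")
	localAIURL := getenvOr("LOCALAI_API_URL", "http://localhost:8081")
	localAGIURL := getenvOr("LOCALAGI_API_URL", "http://localhost:8080")
	fmt.Printf("model=%s localai=%s localagi=%s\n", model, localAIURL, localAGIURL)
}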

@@ -456,7 +456,7 @@ func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.
 		return nil, nil, "", err
 	}
 	originalReasoning := thought.Content
-
+	xlog.Debug("[pickAction] original reasoning", "originalReasoning", originalReasoning)
 	// From the thought, get the action call
 	// Get all the available actions IDs

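The added xlog.Debug call records the model's raw reasoning as a message plus a key/value pair before the action call is parsed out of it. Below is a minimal sketch of the same structured-logging pattern using the standard library's log/slog, assuming the project's xlog wrapper behaves similarly; it is not the project's actual xlog implementation:

package main

import (
	"log/slog"
	"os"
)

func main() {
	// Enable debug-level output; slog drops Debug records by default.
	logger := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug}))
	slog.SetDefault(logger)

	// Placeholder value standing in for thought.Content.
	originalReasoning := "I should call the search action with query ..."

	// Same message/key/value shape as the xlog.Debug call added above.
	slog.Debug("[pickAction] original reasoning", "originalReasoning", originalReasoning)
}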

@@ -8,7 +8,7 @@ services:
     image: localai/localai:master-sycl-f32-ffmpeg-core
     command:
       # - rombo-org_rombo-llm-v3.0-qwen-32b # minimum suggested model
-      - arcee-agent # (smaller)
+      - gemma-3-4b-it # (smaller)
       - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
@@ -62,7 +62,7 @@ services:
       - 8080:3000
     image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=arcee-agent
+      - LOCALAGI_MODEL=gemma-3-4b-it
       - LOCALAGI_LLM_API_URL=http://localai:8080
       #- LOCALAGI_LLM_API_KEY=sk-1234567890
       - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
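
In this compose file, LOCALAGI_MODEL and LOCALAGI_LLM_API_URL point the agent container at the LocalAI service. A hedged sketch of how such values could be wired into an OpenAI-compatible client with github.com/sashabaranov/go-openai; the client library choice and the /v1 base-path handling are assumptions, since the compose file only defines the variables:

package main

import (
	"context"
	"fmt"
	"os"

	openai "github.com/sashabaranov/go-openai"
)

func main() {
	model := os.Getenv("LOCALAGI_MODEL")        // e.g. gemma-3-4b-it
	apiURL := os.Getenv("LOCALAGI_LLM_API_URL") // e.g. http://localai:8080
	apiKey := os.Getenv("LOCALAGI_LLM_API_KEY") // optional, commented out in the compose file

	cfg := openai.DefaultConfig(apiKey)
	// Assumption: LocalAI serves its OpenAI-compatible API under /v1.
	cfg.BaseURL = apiURL + "/v1"
	client := openai.NewClientWithConfig(cfg)

	resp, err := client.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
		Model: model,
		Messages: []openai.ChatCompletionMessage{
			{Role: openai.ChatMessageRoleUser, Content: "Hello"},
		},
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	fmt.Println(resp.Choices[0].Message.Content)
}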

@@ -7,7 +7,7 @@ services:
     # Image list (dockerhub): https://hub.docker.com/r/localai/localai
     image: localai/localai:master-ffmpeg-core
     command:
-      - arcee-agent # (smaller)
+      - gemma-3-4b-it
       - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
@@ -65,7 +65,7 @@ services:
       - 8080:3000
     #image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=arcee-agent
+      - LOCALAGI_MODEL=gemma-3-4b-it
      - LOCALAGI_LLM_API_URL=http://localai:8080
       #- LOCALAGI_LLM_API_KEY=sk-1234567890
       - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
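
Both compose files gate startup on LocalAI's /readyz endpoint via a curl-based healthcheck. The following Go sketch polls the same endpoint until it answers 200 OK, as an illustration of what that check waits for; the base URL, timeout, and poll interval are placeholder values:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// waitReady polls LocalAI's /readyz endpoint until it returns 200 OK,
// mirroring the curl-based healthcheck in the compose files.
func waitReady(baseURL string, timeout time.Duration) error {
	deadline := time.Now().Add(timeout)
	for time.Now().Before(deadline) {
		resp, err := http.Get(baseURL + "/readyz")
		if err == nil {
			resp.Body.Close()
			if resp.StatusCode == http.StatusOK {
				return nil
			}
		}
		time.Sleep(2 * time.Second)
	}
	return fmt.Errorf("LocalAI at %s not ready after %s", baseURL, timeout)
}

func main() {
	if err := waitReady("http://localhost:8080", 2*time.Minute); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("LocalAI is ready")
}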