Compare commits
6 Commits
status...vit-config
| Author | SHA1 | Date |
|---|---|---|
| | 62e08bf900 | |
| | 00ab476a77 | |
| | 906079cbbb | |
| | 808d9c981c | |
| | 2b79c99dd7 | |
| | 77905ed3cd | |
README.md (43 lines changed)
@@ -49,10 +49,10 @@ cd LocalAGI
 docker compose up
 
 # NVIDIA GPU setup
-docker compose --profile nvidia up
+docker compose -f docker-compose.nvidia.yaml up
 
 # Intel GPU setup (for Intel Arc and integrated GPUs)
-docker compose --profile intel up
+docker compose -f docker-compose.intel.yaml up
 
 # Start with a specific model (see available models in models.localai.io, or localai.io to use any model in huggingface)
 MODEL_NAME=gemma-3-12b-it docker compose up
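The day-to-day commands change shape accordingly; a short sketch using only standard Docker Compose flags (nothing repo-specific beyond the file names):

```sh
# Bring the NVIDIA stack up in the background
docker compose -f docker-compose.nvidia.yaml up -d

# Follow logs for the localai service
docker compose -f docker-compose.nvidia.yaml logs -f localai

# Tear the stack down
docker compose -f docker-compose.nvidia.yaml down
```

Unlike `--profile`, the `-f` flag must be repeated on every invocation, since it selects which compose file is read at all.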
@@ -61,11 +61,40 @@ MODEL_NAME=gemma-3-12b-it docker compose up
 MODEL_NAME=gemma-3-12b-it \
 MULTIMODAL_MODEL=minicpm-v-2_6 \
 IMAGE_MODEL=flux.1-dev \
-docker compose --profile nvidia up
+docker compose -f docker-compose.nvidia.yaml up
 ```
 
 Now you can access and manage your agents at [http://localhost:8080](http://localhost:8080)
 
+## 📚🆕 Local Stack Family
+
+🆕 LocalAI is now part of a comprehensive suite of AI tools designed to work together:
+
+<table>
+  <tr>
+    <td width="50%" valign="top">
+      <a href="https://github.com/mudler/LocalAI">
+        <img src="https://raw.githubusercontent.com/mudler/LocalAI/refs/heads/rebranding/core/http/static/logo_horizontal.png" width="300" alt="LocalAI Logo">
+      </a>
+    </td>
+    <td width="50%" valign="top">
+      <h3><a href="https://github.com/mudler/LocalAI">LocalAI</a></h3>
+      <p>LocalAI is the free, Open Source OpenAI alternative. LocalAI acts as a drop-in replacement REST API that's compatible with OpenAI API specifications for local AI inferencing. Does not require GPU.</p>
+    </td>
+  </tr>
+  <tr>
+    <td width="50%" valign="top">
+      <a href="https://github.com/mudler/LocalRecall">
+        <img src="https://raw.githubusercontent.com/mudler/LocalRecall/refs/heads/main/static/localrecall_horizontal.png" width="300" alt="LocalRecall Logo">
+      </a>
+    </td>
+    <td width="50%" valign="top">
+      <h3><a href="https://github.com/mudler/LocalRecall">LocalRecall</a></h3>
+      <p>A RESTful API and knowledge base management system that provides persistent memory and storage capabilities for AI agents.</p>
+    </td>
+  </tr>
+</table>
+
 ## 🖥️ Hardware Configurations
 
 LocalAGI supports multiple hardware configurations through Docker Compose profiles:
@@ -81,7 +110,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Uses CUDA for acceleration
 - Best for high-performance inference
 - Supports text, multimodal, and image generation models
-- Run with: `docker compose --profile nvidia up`
+- Run with: `docker compose -f docker-compose.nvidia.yaml up`
 - Default models:
   - Text: `arcee-agent`
   - Multimodal: `minicpm-v-2_6`
@@ -97,7 +126,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
 - Uses SYCL for acceleration
 - Best for Intel-based systems
 - Supports text, multimodal, and image generation models
-- Run with: `docker compose --profile intel up`
+- Run with: `docker compose -f docker-compose.intel.yaml up`
 - Default models:
   - Text: `arcee-agent`
   - Multimodal: `minicpm-v-2_6`
@@ -120,13 +149,13 @@ MODEL_NAME=gemma-3-12b-it docker compose up
 MODEL_NAME=gemma-3-12b-it \
 MULTIMODAL_MODEL=minicpm-v-2_6 \
 IMAGE_MODEL=flux.1-dev \
-docker compose --profile nvidia up
+docker compose -f docker-compose.nvidia.yaml up
 
 # Intel GPU with custom models
 MODEL_NAME=gemma-3-12b-it \
 MULTIMODAL_MODEL=minicpm-v-2_6 \
 IMAGE_MODEL=sd-1.5-ggml \
-docker compose --profile intel up
+docker compose -f docker-compose.intel.yaml up
 ```
 
 If no models are specified, it will use the defaults:
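Model overrides work through Compose variable interpolation with shell-style defaults (`${VAR:-default}`). To check what a given override resolves to without starting anything, the stock `docker compose config` command renders the interpolated file; a sketch:

```sh
# Render the fully-resolved configuration; unset variables fall back to the
# defaults baked into the file, e.g. ${MODEL_NAME:-arcee-agent} -> arcee-agent
MODEL_NAME=gemma-3-12b-it \
MULTIMODAL_MODEL=minicpm-v-2_6 \
docker compose -f docker-compose.intel.yaml config
```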
@@ -260,6 +260,7 @@ var _ = Describe("Agent test", func() {
 			WithLLMAPIURL(apiURL),
 			WithModel(testModel),
 			WithLLMAPIKey(apiKeyURL),
+			WithTimeout("10m"),
 			WithNewConversationSubscriber(func(m openai.ChatCompletionMessage) {
 				mu.Lock()
 				message = m
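The new `WithTimeout("10m")` option suggests this spec exercises a live model endpoint and can run long. If you run the suite locally, remember that `go test` itself enforces a 10-minute default deadline; a hedged sketch of raising it (a plain `go test` invocation for the Ginkgo suite is assumed):

```sh
# Give the whole suite headroom beyond the agent's own 10m timeout
go test -timeout 30m ./...
```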
docker-compose.intel.yaml (new file, 33 lines)
@@ -0,0 +1,33 @@
+services:
+  localai:
+    extends:
+      file: docker-compose.yaml
+      service: localai
+    environment:
+      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
+      - DEBUG=true
+    image: localai/localai:master-sycl-f32-ffmpeg-core
+    devices:
+      # On a system with integrated GPU and an Arc 770, this is the Arc 770
+      - /dev/dri/card1
+      - /dev/dri/renderD129
+    command:
+      - ${MODEL_NAME:-arcee-agent}
+      - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
+      - ${IMAGE_MODEL:-sd-1.5-ggml}
+      - granite-embedding-107m-multilingual
+
+  localrecall:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall
+
+  localrecall-healthcheck:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall-healthcheck
+
+  localagi:
+    extends:
+      file: docker-compose.yaml
+      service: localagi
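The pinned `/dev/dri/card1` and `/dev/dri/renderD129` nodes match the author's machine, as the comment says; on other systems the numbering differs. A sketch for finding the right pair, assuming standard Linux DRI tooling:

```sh
# List the DRI nodes on this host
ls -l /dev/dri/

# Check which PCI device backs a given render node (udev assumed present)
udevadm info /dev/dri/renderD128 | grep -i -e pci -e driver
```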
docker-compose.nvidia.yaml (new file, 31 lines)
@@ -0,0 +1,31 @@
+services:
+  localai:
+    extends:
+      file: docker-compose.yaml
+      service: localai
+    environment:
+      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
+      - DEBUG=true
+    image: localai/localai:master-cublas-cuda12-ffmpeg-core
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
+
+  localrecall:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall
+
+  localrecall-healthcheck:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall-healthcheck
+
+  localagi:
+    extends:
+      file: docker-compose.yaml
+      service: localagi
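The `deploy.resources.reservations.devices` block only takes effect when Docker can actually hand out the GPU, which requires the NVIDIA Container Toolkit on the host. A standard smoke test before bringing the stack up (the CUDA image tag is illustrative):

```sh
# Should print the familiar nvidia-smi table; if it errors, fix the
# container toolkit installation before debugging the compose file
docker run --rm --gpus all nvidia/cuda:12.4.1-base-ubuntu22.04 nvidia-smi
```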
docker-compose.yaml
@@ -7,8 +7,9 @@ services:
     # Image list (dockerhub): https://hub.docker.com/r/localai/localai
     image: localai/localai:master-ffmpeg-core
     command:
       # - gemma-3-12b-it
       - ${MODEL_NAME:-arcee-agent}
       - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
       - ${IMAGE_MODEL:-flux.1-dev}
+      - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
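The healthcheck polls LocalAI's `/readyz` endpoint; the same probe works from the host once port 8080 is published, which is useful while models are still downloading on first boot:

```sh
# Exits non-zero until LocalAI has loaded its backends and is ready to serve
curl -f http://localhost:8080/readyz
```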
@@ -24,44 +25,6 @@ services:
       - ./volumes/models:/build/models:cached
       - ./volumes/images:/tmp/generated/images
 
-  localai-nvidia:
-    profiles: ["nvidia"]
-    extends:
-      service: localai
-    environment:
-      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
-      - DEBUG=true
-    deploy:
-      resources:
-        reservations:
-          devices:
-            - driver: nvidia
-              count: 1
-              capabilities: [gpu]
-    command:
-      - ${MODEL_NAME:-arcee-agent}
-      - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
-      - ${IMAGE_MODEL:-flux.1-dev}
-      - granite-embedding-107m-multilingual
-
-  localai-intel:
-    profiles: ["intel"]
-    environment:
-      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
-      - DEBUG=true
-    extends:
-      service: localai
-    image: localai/localai:master-sycl-f32-ffmpeg-core
-    devices:
-      # On a system with integrated GPU and an Arc 770, this is the Arc 770
-      - /dev/dri/card1
-      - /dev/dri/renderD129
-    command:
-      - ${MODEL_NAME:-arcee-agent}
-      - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
-      - ${IMAGE_MODEL:-sd-1.5-ggml}
-      - granite-embedding-107m-multilingual
-
   localrecall:
     image: quay.io/mudler/localrecall:main
     ports:
@@ -97,6 +60,8 @@ services:
     #image: quay.io/mudler/localagi:master
     environment:
       - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
+      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
+      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
       - LOCALAGI_LLM_API_URL=http://localai:8080
       #- LOCALAGI_LLM_API_KEY=sk-1234567890
       - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
@@ -106,32 +71,4 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
       - ./volumes/localagi/:/pool
 
-  localagi-nvidia:
-    profiles: ["nvidia"]
-    extends:
-      service: localagi
-    environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
-      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
-      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-flux.1-dev}
-      - LOCALAGI_LLM_API_URL=http://localai:8080
-      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
-      - LOCALAGI_STATE_DIR=/pool
-      - LOCALAGI_TIMEOUT=5m
-      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
-
-  localagi-intel:
-    profiles: ["intel"]
-    extends:
-      service: localagi
-    environment:
-      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
-      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
-      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
-      - LOCALAGI_LLM_API_URL=http://localai:8080
-      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
-      - LOCALAGI_STATE_DIR=/pool
-      - LOCALAGI_TIMEOUT=5m
-      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
-      - ./volumes/localagi/:/pool
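Worth noticing across these hunks: `MODEL_NAME` and friends are interpolated in two places at once, in the localai service's `command` (which models the server loads) and in localagi's `LOCALAGI_*` environment (which models the agent requests), so one variable keeps both services in sync:

```sh
# Server and agent both switch to the same model from a single override
MODEL_NAME=gemma-3-12b-it docker compose up
```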
@@ -1,12 +1,13 @@
 import { useState, useEffect } from 'react';
-import { useParams, Link } from 'react-router-dom';
+import { useParams, Link, useNavigate } from 'react-router-dom';
 
 function AgentStatus() {
   const { name } = useParams();
+  const navigate = useNavigate();
   const [statusData, setStatusData] = useState(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState(null);
-  const [_eventSource, setEventSource] = useState(null);
+  const [eventSource, setEventSource] = useState(null);
   const [liveUpdates, setLiveUpdates] = useState([]);
 
   // Update document title
@@ -48,7 +49,7 @@
         const data = JSON.parse(event.data);
         setLiveUpdates(prev => [data, ...prev.slice(0, 19)]); // Keep last 20 updates
       } catch (err) {
-        setLiveUpdates(prev => [event.data, ...prev.slice(0, 19)]);
+        console.error('Error parsing SSE data:', err);
       }
     });
|
||||
@@ -128,9 +129,23 @@ function AgentStatus() {
|
||||
<h2 className="text-sm font-semibold mb-2">Agent Action:</h2>
|
||||
<div className="status-details">
|
||||
<div className="status-row">
|
||||
<span className="status-label">{index}</span>
|
||||
<span className="status-value">{formatValue(item)}</span>
|
||||
<span className="status-label">Result:</span>
|
||||
<span className="status-value">{formatValue(item.Result)}</span>
|
||||
</div>
|
||||
<div className="status-row">
|
||||
<span className="status-label">Action:</span>
|
||||
<span className="status-value">{formatValue(item.Action)}</span>
|
||||
</div>
|
||||
<div className="status-row">
|
||||
<span className="status-label">Parameters:</span>
|
||||
<span className="status-value pre-wrap">{formatValue(item.Params)}</span>
|
||||
</div>
|
||||
{item.Reasoning && (
|
||||
<div className="status-row">
|
||||
<span className="status-label">Reasoning:</span>
|
||||
<span className="status-value reasoning">{formatValue(item.Reasoning)}</span>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -30,6 +30,7 @@ export default defineConfig(({ mode }) => {
         '/status': backendUrl,
         '/action': backendUrl,
         '/actions': backendUrl,
+        '/avatars': backendUrl
       }
     }
   }
@@ -4,7 +4,6 @@ import (
 	"crypto/subtle"
 	"embed"
 	"errors"
-	"fmt"
 	"math/rand"
 	"net/http"
 	"path/filepath"
@@ -239,20 +238,9 @@ func (app *App) registerRoutes(pool *state.AgentPool, webapp *fiber.App) {
 			history = &state.Status{ActionResults: []types.ActionState{}}
 		}
 
-		entries := []string{}
-		for _, h := range Reverse(history.Results()) {
-			entries = append(entries, fmt.Sprintf(
-				"Result: %v Action: %v Params: %v Reasoning: %v",
-				h.Result,
-				h.Action.Definition().Name,
-				h.Params,
-				h.Reasoning,
-			))
-		}
-
 		return c.JSON(fiber.Map{
 			"Name":    c.Params("name"),
-			"History": entries,
+			"History": Reverse(history.Results()),
 		})
 	})
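With `History` now carrying the raw `ActionState` values instead of pre-formatted strings, the endpoint returns structured objects, which is exactly what the reworked AgentStatus rows (`item.Result`, `item.Action`, `item.Params`, `item.Reasoning`) consume. A hedged sketch of the new payload; the route path and values are inferred from the code above, not verified:

```sh
# Assuming the handler is mounted at /status/:name
curl -s http://localhost:8080/status/my-agent

# Illustrative response shape:
# {
#   "Name": "my-agent",
#   "History": [
#     {"Result": "...", "Action": {...}, "Params": {...}, "Reasoning": "..."}
#   ]
# }
```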