Compare commits

1 commit

Richard Palethorpe · f289e824df · 2025-04-23 07:23:51 +01:00

fix: Handle state on agent restart and update observables
Keep some agent state across restarts, such as the SSE manager and
observer. This allows restarts to be shown on the state page and also
allows avatars to be kept when reconfiguring the agent.

Also, observable updates can arrive out of order because the SSE manager
has multiple workers. For now this is handled in the client.

Finally, fix an issue with the IRC client so that it disconnects cleanly
and handles being assigned a different nickname by the server.

Signed-off-by: Richard Palethorpe <io@richiejp.com>
6 changed files with 20 additions and 80 deletions
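The first paragraph of the commit message describes a concrete ownership pattern: components that must survive a restart (the SSE manager and the observer) are held by something longer-lived than the agent itself, so rebuilding the agent does not drop client connections or avatar state. Below is a minimal Go sketch of that idea; the `Pool`, `SSEManager`, `Observer`, and `Restart` names are invented for illustration and are not taken from the LocalAGI sources.

```go
package main

import (
	"context"
	"fmt"
)

// Long-lived components that must survive agent restarts.
type SSEManager struct{ clients int }
type Observer struct{ avatars map[string]string }

// Agent is rebuilt on every reconfiguration.
type Agent struct {
	ctx    context.Context
	cancel context.CancelFunc
	sse    *SSEManager
	obs    *Observer
}

// Pool outlives any single agent and owns the persistent pieces.
type Pool struct {
	sse *SSEManager
	obs *Observer
}

// Restart cancels the old agent but hands the pool-owned SSE manager
// and observer to the new one, so restarts stay visible on the state
// page and avatars survive reconfiguration.
func (p *Pool) Restart(old *Agent) *Agent {
	if old != nil {
		old.cancel()
	}
	ctx, cancel := context.WithCancel(context.Background())
	return &Agent{ctx: ctx, cancel: cancel, sse: p.sse, obs: p.obs}
}

func main() {
	p := &Pool{sse: &SSEManager{}, obs: &Observer{avatars: map[string]string{}}}
	a := p.Restart(nil)
	a = p.Restart(a) // reconfigure: the same sse/obs instances persist
	fmt.Println(a.sse == p.sse, a.obs == p.obs) // true true
}
```

The out-of-order observable updates mentioned in the second paragraph come from the SSE manager's multiple workers; per the message, ordering is reconciled on the client side for now rather than by serializing the workers.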

View File

@@ -2,7 +2,7 @@
<img src="./webui/react-ui/public/logo_1.png" alt="LocalAGI Logo" width="220"/>
</p>
-<h3 align="center"><em>Your AI. Your Hardware. Your Rules</em></h3>
+<h3 align="center"><em>Your AI. Your Hardware. Your Rules.</em></h3>
<div align="center">
@@ -13,9 +13,9 @@
</div>
Create customizable AI assistants, automations, chat bots and agents that run 100% locally. No need for agentic Python libraries or cloud service keys, just bring your GPU (or even just CPU) and a web browser.
We empower you building AI Agents that you can run locally, without coding.
-**LocalAGI** is a powerful, self-hostable AI Agent platform that allows you to design AI automations without writing code. A complete drop-in replacement for OpenAI's Responses APIs with advanced agentic capabilities. No clouds. No data leaks. Just pure local AI that works on consumer-grade hardware (CPU and GPU).
+**LocalAGI** is a powerful, self-hostable AI Agent platform designed for maximum privacy and flexibility. A complete drop-in replacement for OpenAI's Responses APIs with advanced agentic capabilities. No clouds. No data leaks. Just pure local AI that works on consumer-grade hardware (CPU and GPU).
## 🛡️ Take Back Your Privacy
@@ -37,7 +37,6 @@ LocalAGI ensures your data stays exactly where you want it—on your hardware. N
- 🖼 **Multimodal Support**: Ready for vision, text, and more.
- 🔧 **Extensible Custom Actions**: Easily script dynamic agent behaviors in Go (interpreted, no compilation!).
- 🛠 **Fully Customizable Models**: Use your own models or integrate seamlessly with [LocalAI](https://github.com/mudler/LocalAI).
-- 📊 **Observability**: Monitor agent status and view detailed observable updates in real-time.
## 🛠️ Quickstart
@@ -195,8 +194,6 @@ LocalAGI is part of the powerful Local family of privacy-focused AI tools:
![Web UI Dashboard](https://github.com/user-attachments/assets/a40194f9-af3a-461f-8b39-5f4612fbf221)
![Web UI Agent Settings](https://github.com/user-attachments/assets/fb3c3e2a-cd53-4ca8-97aa-c5da51ff1f83)
![Web UI Create Group](https://github.com/user-attachments/assets/102189a2-0fba-4a1e-b0cb-f99268ef8062)
-![Web UI Agent Observability](https://github.com/user-attachments/assets/f7359048-9d28-4cf1-9151-1f5556ce9235)
### Connectors Ready-to-Go

View File

@@ -2,7 +2,6 @@ package agent
 import (
 	"context"
-	"errors"
 	"fmt"
 	"os"
 	"sync"
@@ -990,6 +989,7 @@ func (a *Agent) periodicallyRun(timer *time.Timer) {
 }
 func (a *Agent) Run() error {
+	a.startNewConversationsConsumer()
 	xlog.Debug("Agent is now running", "agent", a.Character.Name)
 	// The agent run does two things:
@@ -1004,68 +1004,36 @@ func (a *Agent) Run() error {
 	// Expose a REST API to interact with the agent to ask it things
 	//todoTimer := time.NewTicker(a.options.periodicRuns)
 	timer := time.NewTimer(a.options.periodicRuns)
+	// we fire the periodicalRunner only once.
+	go a.periodicalRunRunner(timer)
-	var errs []error
-	var muErr sync.Mutex
-	var wg sync.WaitGroup
-	parallelJobs := a.options.parallelJobs
-	if a.options.parallelJobs == 0 {
-		parallelJobs = 1
-	}
-	for i := 0; i < parallelJobs; i++ {
-		xlog.Debug("Starting agent worker", "worker", i)
-		wg.Add(1)
-		go func() {
-			e := a.run(timer)
-			muErr.Lock()
-			errs = append(errs, e)
-			muErr.Unlock()
-			wg.Done()
-		}()
-	}
-	wg.Wait()
-	return errors.Join(errs...)
-}
-func (a *Agent) run(timer *time.Timer) error {
 	for {
 		xlog.Debug("Agent is now waiting for a new job", "agent", a.Character.Name)
 		select {
 		case job := <-a.jobQueue:
-			if !timer.Stop() {
-				<-timer.C
-			}
-			xlog.Debug("Agent is consuming a job", "agent", a.Character.Name, "job", job)
-			a.consumeJob(job, UserRole)
-			timer.Reset(a.options.periodicRuns)
+			a.loop(timer, job)
 		case <-a.context.Done():
 			// Agent has been canceled, return error
 			xlog.Warn("Agent has been canceled", "agent", a.Character.Name)
 			return ErrContextCanceled
 		}
 	}
 }
+func (a *Agent) periodicalRunRunner(timer *time.Timer) {
+	for {
+		select {
+		case <-a.context.Done():
+			// Agent has been canceled, return error
+			xlog.Warn("periodicalRunner has been canceled", "agent", a.Character.Name)
+			return
+		case <-timer.C:
+			a.periodicallyRun(timer)
+		}
+	}
+}
+func (a *Agent) loop(timer *time.Timer, job *types.Job) {
+	// Remember always to reset the timer - if we don't the agent will stop..
+	defer timer.Reset(a.options.periodicRuns)
+	// Consume the job and generate a response
+	// TODO: Give a short-term memory to the agent
+	// stop and drain the timer
+	if !timer.Stop() {
+		<-timer.C
+	}
+	xlog.Debug("Agent is consuming a job", "agent", a.Character.Name, "job", job)
+	a.consumeJob(job, UserRole)
+}
 func (a *Agent) Observer() Observer {
 	return a.observer
 }
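The new `loop` helper above is built around Go's stop-and-drain timer idiom: `Timer.Stop` returns false when the timer has already fired, and in that case the pending value must be read from the channel before `Reset`, otherwise the next receive returns a stale tick immediately. A self-contained sketch of the same pattern (the `resetSafely` helper is illustrative, not part of the codebase):

```go
package main

import (
	"fmt"
	"time"
)

// resetSafely restarts t for d, draining any tick that already
// fired so the next receive on t.C cannot see a stale value.
func resetSafely(t *time.Timer, d time.Duration) {
	if !t.Stop() {
		// Stop reported the timer already fired: its channel
		// holds a value that must be consumed before Reset.
		<-t.C
	}
	t.Reset(d)
}

func main() {
	t := time.NewTimer(10 * time.Millisecond)
	time.Sleep(20 * time.Millisecond) // let the timer fire
	resetSafely(t, 50*time.Millisecond)
	fmt.Println("tick:", <-t.C) // arrives ~50ms after the reset
}
```

The deferred `timer.Reset` in `loop` restores the periodic schedule after every job, which is why the code comments warn that forgetting it would stop the agent.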

View File

@@ -54,8 +54,7 @@ type options struct {
 	newConversationsSubscribers []func(openai.ChatCompletionMessage)
-	observer                    Observer
-	parallelJobs                int
+	observer                    Observer
 }
func (o *options) SeparatedMultimodalModel() bool {
@@ -64,7 +63,6 @@ func (o *options) SeparatedMultimodalModel() bool {
 func defaultOptions() *options {
 	return &options{
-		parallelJobs: 1,
 		periodicRuns: 15 * time.Minute,
 		LLMAPI: llmOptions{
 			APIURL: "http://localhost:8080",
@@ -140,13 +138,6 @@ func EnableKnowledgeBaseWithResults(results int) Option {
 	}
 }
-func WithParallelJobs(jobs int) Option {
-	return func(o *options) error {
-		o.parallelJobs = jobs
-		return nil
-	}
-}
 func WithNewConversationSubscriber(sub func(openai.ChatCompletionMessage)) Option {
 	return func(o *options) error {
 		o.newConversationsSubscribers = append(o.newConversationsSubscribers, sub)

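The removed `WithParallelJobs` follows the functional-options pattern used throughout this file: each `Option` is a closure that validates and applies one setting to the `options` struct. A compressed, self-contained sketch of the pattern (the validation and defaults here are illustrative):

```go
package main

import "fmt"

// Option mutates an options struct and may reject bad values.
type Option func(o *options) error

type options struct {
	parallelJobs int
}

// WithParallelJobs mirrors the removed option: it stores the
// worker count that Agent.Run's job loop used to spawn.
func WithParallelJobs(jobs int) Option {
	return func(o *options) error {
		if jobs < 1 {
			return fmt.Errorf("parallel jobs must be >= 1, got %d", jobs)
		}
		o.parallelJobs = jobs
		return nil
	}
}

func newOptions(opts ...Option) (*options, error) {
	o := &options{parallelJobs: 1} // defaults
	for _, opt := range opts {
		if err := opt(o); err != nil {
			return nil, err
		}
	}
	return o, nil
}

func main() {
	o, err := newOptions(WithParallelJobs(4))
	fmt.Println(o.parallelJobs, err) // 4 <nil>
}
```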
View File

@@ -25,7 +25,6 @@ var _ = Describe("Agent test", func() {
 		agent, err = New(
 			WithLLMAPIURL(apiURL),
 			WithModel(testModel),
-			WithTimeout("10m"),
 			WithRandomIdentity(),
 		)
Expect(err).ToNot(HaveOccurred())

View File

@@ -61,7 +61,6 @@ type AgentConfig struct {
 	SystemPrompt          string `json:"system_prompt" form:"system_prompt"`
 	LongTermMemory        bool   `json:"long_term_memory" form:"long_term_memory"`
 	SummaryLongTermMemory bool   `json:"summary_long_term_memory" form:"summary_long_term_memory"`
-	ParallelJobs          int    `json:"parallel_jobs" form:"parallel_jobs"`
 }
type AgentConfigMeta struct {
@@ -261,16 +260,6 @@ func NewAgentConfigMeta(
 			Step: 1,
 			Tags: config.Tags{Section: "AdvancedSettings"},
 		},
-		{
-			Name:         "parallel_jobs",
-			Label:        "Parallel Jobs",
-			Type:         "number",
-			DefaultValue: 5,
-			Min:          1,
-			Step:         1,
-			HelpText:     "Number of concurrent tasks that can run in parallel",
-			Tags:         config.Tags{Section: "AdvancedSettings"},
-		},
 	},
 	MCPServers: []config.Field{
 		{

View File

@@ -524,10 +524,6 @@ func (a *AgentPool) startAgentWithConfig(name string, config *AgentConfig, obs O
 		opts = append(opts, WithLoopDetectionSteps(config.LoopDetectionSteps))
 	}
-	if config.ParallelJobs > 0 {
-		opts = append(opts, WithParallelJobs(config.ParallelJobs))
-	}
 	xlog.Info("Starting agent", "name", name, "config", config)
 	agent, err := New(opts...)