Compare commits


1 Commit

Author: Ettore Di Giacinto
SHA1: a33425c707
Message: fix(github*): pass by correctly owner and repository
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Date: 2025-04-17 22:58:59 +02:00
25 changed files with 168 additions and 968 deletions

View File

@@ -9,7 +9,7 @@ cleanup-tests:
docker compose down
tests: prepare-tests
LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
run-nokb:
$(MAKE) run KBDISABLEINDEX=true

View File

@@ -114,7 +114,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
- Supports text, multimodal, and image generation models
- Run with: `docker compose -f docker-compose.nvidia.yaml up`
- Default models:
- Text: `gemma-3-12b-it-qat`
- Text: `arcee-agent`
- Multimodal: `minicpm-v-2_6`
- Image: `sd-1.5-ggml`
- Environment variables:
@@ -130,7 +130,7 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
- Supports text, multimodal, and image generation models
- Run with: `docker compose -f docker-compose.intel.yaml up`
- Default models:
- Text: `gemma-3-12b-it-qat`
- Text: `arcee-agent`
- Multimodal: `minicpm-v-2_6`
- Image: `sd-1.5-ggml`
- Environment variables:
@@ -161,7 +161,7 @@ docker compose -f docker-compose.intel.yaml up
```
If no models are specified, it will use the defaults:
- Text model: `gemma-3-12b-it-qat`
- Text model: `arcee-agent`
- Multimodal model: `minicpm-v-2_6`
- Image model: `sd-1.5-ggml`

View File

@@ -2,6 +2,7 @@ package action
import (
"context"
"fmt"
"github.com/mudler/LocalAGI/core/types"
"github.com/sashabaranov/go-openai/jsonschema"
@@ -15,6 +16,24 @@ func NewState() *StateAction {
type StateAction struct{}
// AgentInternalState is the structure used to keep track of the agent's
// current state and the short-term memory it can update.
// Besides the long-term memory accessible to the agent (backed by a vector database)
// and the context memory (also backed by a vector database),
// this is the shorter memory that the LLM keeps across conversations and
// throughout its reasoning process and lifetime.
// TODO: A special action is then used to let the LLM itself update this memory
// periodically during self-processing, and the same action is ALSO exposed
// during the conversation to let the user set, for example, a new goal for the agent.
type AgentInternalState struct {
NowDoing string `json:"doing_now"`
DoingNext string `json:"doing_next"`
DoneHistory []string `json:"done_history"`
Memories []string `json:"memories"`
Goal string `json:"goal"`
}
func (a *StateAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
return types.ActionResult{Result: "internal state has been updated"}, nil
}
@@ -57,3 +76,23 @@ func (a *StateAction) Definition() types.ActionDefinition {
},
}
}
const fmtT = `=====================
NowDoing: %s
DoingNext: %s
Your current goal is: %s
You have done: %+v
You have a short memory with: %+v
=====================
`
func (c AgentInternalState) String() string {
return fmt.Sprintf(
fmtT,
c.NowDoing,
c.DoingNext,
c.Goal,
c.DoneHistory,
c.Memories,
)
}
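
Since this commit moves AgentInternalState from core/types into core/action, here is a minimal, self-contained sketch of how that state round-trips through JSON before it is stored on the agent (the later runAction hunk does the same via params.Unmarshal). The struct fields, JSON tags, and format template mirror the code above; the package layout and the sample tool-call payload are illustrative only.

```go
// Self-contained sketch: unmarshal a hypothetical state-action payload
// into the same struct shape and print it with the same template.
package main

import (
	"encoding/json"
	"fmt"
	"log"
)

type AgentInternalState struct {
	NowDoing    string   `json:"doing_now"`
	DoingNext   string   `json:"doing_next"`
	DoneHistory []string `json:"done_history"`
	Memories    []string `json:"memories"`
	Goal        string   `json:"goal"`
}

const fmtT = `=====================
NowDoing: %s
DoingNext: %s
Your current goal is: %s
You have done: %+v
You have a short memory with: %+v
=====================
`

func (c AgentInternalState) String() string {
	return fmt.Sprintf(fmtT, c.NowDoing, c.DoingNext, c.Goal, c.DoneHistory, c.Memories)
}

func main() {
	// Hypothetical arguments, shaped like what the LLM returns for the state action.
	payload := `{"doing_now":"replying to the user","goal":"keep the docs up to date","memories":["user prefers short answers"]}`

	var state AgentInternalState
	if err := json.Unmarshal([]byte(payload), &state); err != nil {
		log.Fatal(err)
	}
	// In the agent, params.Unmarshal(&state) yields the same result before the
	// value is stored in a.currentState and persisted to the state file.
	fmt.Print(state)
}
```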

View File

@@ -22,7 +22,7 @@ type decisionResult struct {
// decision forces the agent to take one of the available actions
func (a *Agent) decision(
job *types.Job,
ctx context.Context,
conversation []openai.ChatCompletionMessage,
tools []openai.Tool, toolchoice string, maxRetries int) (*decisionResult, error) {
@@ -35,63 +35,31 @@ func (a *Agent) decision(
}
}
decision := openai.ChatCompletionRequest{
Model: a.options.LLMAPI.Model,
Messages: conversation,
Tools: tools,
}
if choice != nil {
decision.ToolChoice = *choice
}
var obs *types.Observable
if job.Obs != nil {
obs = a.observer.NewObservable()
obs.Name = "decision"
obs.ParentID = job.Obs.ID
obs.Icon = "brain"
obs.Creation = &types.Creation{
ChatCompletionRequest: &decision,
}
a.observer.Update(*obs)
}
var lastErr error
for attempts := 0; attempts < maxRetries; attempts++ {
resp, err := a.client.CreateChatCompletion(job.GetContext(), decision)
decision := openai.ChatCompletionRequest{
Model: a.options.LLMAPI.Model,
Messages: conversation,
Tools: tools,
}
if choice != nil {
decision.ToolChoice = *choice
}
resp, err := a.client.CreateChatCompletion(ctx, decision)
if err != nil {
lastErr = err
xlog.Warn("Attempt to make a decision failed", "attempt", attempts+1, "error", err)
if obs != nil {
obs.Progress = append(obs.Progress, types.Progress{
Error: err.Error(),
})
a.observer.Update(*obs)
}
continue
}
jsonResp, _ := json.Marshal(resp)
xlog.Debug("Decision response", "response", string(jsonResp))
if obs != nil {
obs.AddProgress(types.Progress{
ChatCompletionResponse: &resp,
})
}
if len(resp.Choices) != 1 {
lastErr = fmt.Errorf("no choices: %d", len(resp.Choices))
xlog.Warn("Attempt to make a decision failed", "attempt", attempts+1, "error", lastErr)
if obs != nil {
obs.Progress[len(obs.Progress)-1].Error = lastErr.Error()
a.observer.Update(*obs)
}
continue
}
@@ -100,12 +68,6 @@ func (a *Agent) decision(
if err := a.saveConversation(append(conversation, msg), "decision"); err != nil {
xlog.Error("Error saving conversation", "error", err)
}
if obs != nil {
obs.MakeLastProgressCompletion()
a.observer.Update(*obs)
}
return &decisionResult{message: msg.Content}, nil
}
@@ -113,12 +75,6 @@ func (a *Agent) decision(
if err := params.Read(msg.ToolCalls[0].Function.Arguments); err != nil {
lastErr = err
xlog.Warn("Attempt to parse action parameters failed", "attempt", attempts+1, "error", err)
if obs != nil {
obs.Progress[len(obs.Progress)-1].Error = lastErr.Error()
a.observer.Update(*obs)
}
continue
}
@@ -126,11 +82,6 @@ func (a *Agent) decision(
xlog.Error("Error saving conversation", "error", err)
}
if obs != nil {
obs.MakeLastProgressCompletion()
a.observer.Update(*obs)
}
return &decisionResult{actionParams: params, actioName: msg.ToolCalls[0].Function.Name, message: msg.Content}, nil
}
@@ -222,7 +173,7 @@ func (m Messages) IsLastMessageFromRole(role string) bool {
return m[len(m)-1].Role == role
}
func (a *Agent) generateParameters(job *types.Job, pickTemplate string, act types.Action, c []openai.ChatCompletionMessage, reasoning string, maxAttempts int) (*decisionResult, error) {
func (a *Agent) generateParameters(ctx context.Context, pickTemplate string, act types.Action, c []openai.ChatCompletionMessage, reasoning string, maxAttempts int) (*decisionResult, error) {
stateHUD, err := renderTemplate(pickTemplate, a.prepareHUD(), a.availableActions(), reasoning)
if err != nil {
return nil, err
@@ -250,7 +201,7 @@ func (a *Agent) generateParameters(job *types.Job, pickTemplate string, act type
var attemptErr error
for attempts := 0; attempts < maxAttempts; attempts++ {
result, attemptErr = a.decision(job,
result, attemptErr = a.decision(ctx,
cc,
a.availableActions().ToTools(),
act.Definition().Name.String(),
@@ -312,7 +263,7 @@ func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction
subTaskAction := a.availableActions().Find(subtask.Action)
subTaskReasoning := fmt.Sprintf("%s Overall goal is: %s", subtask.Reasoning, planResult.Goal)
params, err := a.generateParameters(job, pickTemplate, subTaskAction, conv, subTaskReasoning, maxRetries)
params, err := a.generateParameters(ctx, pickTemplate, subTaskAction, conv, subTaskReasoning, maxRetries)
if err != nil {
xlog.Error("error generating action's parameters", "error", err)
return conv, fmt.Errorf("error generating action's parameters: %w", err)
@@ -342,7 +293,7 @@ func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction
break
}
result, err := a.runAction(job, subTaskAction, actionParams)
result, err := a.runAction(ctx, subTaskAction, actionParams)
if err != nil {
xlog.Error("error running action", "error", err)
return conv, fmt.Errorf("error running action: %w", err)
@@ -427,7 +378,7 @@ func (a *Agent) prepareHUD() (promptHUD *PromptHUD) {
}
// pickAction picks an action based on the conversation
func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatCompletionMessage, maxRetries int) (types.Action, types.ActionParams, string, error) {
func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.ChatCompletionMessage, maxRetries int) (types.Action, types.ActionParams, string, error) {
c := messages
xlog.Debug("[pickAction] picking action starts", "messages", messages)
@@ -438,7 +389,7 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
xlog.Debug("not forcing reasoning")
// We also could avoid to use functions here and get just a reply from the LLM
// and then use the reply to get the action
thought, err := a.decision(job,
thought, err := a.decision(ctx,
messages,
a.availableActions().ToTools(),
"",
@@ -480,7 +431,7 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
}, c...)
}
thought, err := a.decision(job,
thought, err := a.decision(ctx,
c,
types.Actions{action.NewReasoning()}.ToTools(),
action.NewReasoning().Definition().Name.String(), maxRetries)
@@ -516,7 +467,7 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
// to avoid hallucinations
// Extract an action
params, err := a.decision(job,
params, err := a.decision(ctx,
append(c, openai.ChatCompletionMessage{
Role: "system",
Content: "Pick the relevant action given the following reasoning: " + originalReasoning,

View File

@@ -30,7 +30,7 @@ type Agent struct {
jobQueue chan *types.Job
context *types.ActionContext
currentState *types.AgentInternalState
currentState *action.AgentInternalState
selfEvaluationInProgress bool
pause bool
@@ -41,8 +41,6 @@ type Agent struct {
subscriberMutex sync.Mutex
newMessagesSubscribers []func(openai.ChatCompletionMessage)
observer Observer
}
type RAGDB interface {
@@ -71,17 +69,12 @@ func New(opts ...Option) (*Agent, error) {
options: options,
client: client,
Character: options.character,
currentState: &types.AgentInternalState{},
currentState: &action.AgentInternalState{},
context: types.NewActionContext(ctx, cancel),
newConversations: make(chan openai.ChatCompletionMessage),
newMessagesSubscribers: options.newConversationsSubscribers,
}
// Initialize observer if provided
if options.observer != nil {
a.observer = options.observer
}
if a.options.statefile != "" {
if _, err := os.Stat(a.options.statefile); err == nil {
if err = a.LoadState(a.options.statefile); err != nil {
@@ -153,14 +146,6 @@ func (a *Agent) Ask(opts ...types.JobOption) *types.JobResult {
xlog.Debug("Agent has finished being asked", "agent", a.Character.Name)
}()
if a.observer != nil {
obs := a.observer.NewObservable()
obs.Name = "job"
obs.Icon = "plug"
a.observer.Update(*obs)
opts = append(opts, types.WithObservable(obs))
}
return a.Execute(types.NewJob(
append(
opts,
@@ -178,20 +163,6 @@ func (a *Agent) Execute(j *types.Job) *types.JobResult {
xlog.Debug("Agent has finished", "agent", a.Character.Name)
}()
if j.Obs != nil {
j.Result.AddFinalizer(func(ccm []openai.ChatCompletionMessage) {
j.Obs.Completion = &types.Completion{
Conversation: ccm,
}
if j.Result.Error != nil {
j.Obs.Completion.Error = j.Result.Error.Error()
}
a.observer.Update(*j.Obs)
})
}
a.Enqueue(j)
return j.Result.WaitResult()
}
@@ -266,78 +237,34 @@ func (a *Agent) Memory() RAGDB {
return a.options.ragdb
}
func (a *Agent) runAction(job *types.Job, chosenAction types.Action, params types.ActionParams) (result types.ActionResult, err error) {
var obs *types.Observable
if job.Obs != nil {
obs = a.observer.NewObservable()
obs.Name = "action"
obs.Icon = "bolt"
obs.ParentID = job.Obs.ID
obs.Creation = &types.Creation{
FunctionDefinition: chosenAction.Definition().ToFunctionDefinition(),
FunctionParams: params,
}
a.observer.Update(*obs)
}
xlog.Info("[runAction] Running action", "action", chosenAction.Definition().Name, "agent", a.Character.Name, "params", params.String())
func (a *Agent) runAction(ctx context.Context, chosenAction types.Action, params types.ActionParams) (result types.ActionResult, err error) {
for _, act := range a.availableActions() {
if act.Definition().Name == chosenAction.Definition().Name {
res, err := act.Run(job.GetContext(), params)
res, err := act.Run(ctx, params)
if err != nil {
if obs != nil {
obs.Completion = &types.Completion{
Error: err.Error(),
}
}
return types.ActionResult{}, fmt.Errorf("error running action: %w", err)
}
if obs != nil {
obs.Progress = append(obs.Progress, types.Progress{
ActionResult: res.Result,
})
a.observer.Update(*obs)
}
result = res
}
}
xlog.Info("[runAction] Running action", "action", chosenAction.Definition().Name, "agent", a.Character.Name, "params", params.String())
if chosenAction.Definition().Name.Is(action.StateActionName) {
// We need to store the result in the state
state := types.AgentInternalState{}
state := action.AgentInternalState{}
err = params.Unmarshal(&state)
if err != nil {
werr := fmt.Errorf("error unmarshalling state of the agent: %w", err)
if obs != nil {
obs.Completion = &types.Completion{
Error: werr.Error(),
}
}
return types.ActionResult{}, werr
return types.ActionResult{}, fmt.Errorf("error unmarshalling state of the agent: %w", err)
}
// update the current state with the one we just got from the action
a.currentState = &state
if obs != nil {
obs.Progress = append(obs.Progress, types.Progress{
AgentState: &state,
})
a.observer.Update(*obs)
}
// update the state file
if a.options.statefile != "" {
if err := a.SaveState(a.options.statefile); err != nil {
if obs != nil {
obs.Completion = &types.Completion{
Error: err.Error(),
}
}
return types.ActionResult{}, err
}
}
@@ -345,11 +272,6 @@ func (a *Agent) runAction(job *types.Job, chosenAction types.Action, params type
xlog.Debug("[runAction] Action result", "action", chosenAction.Definition().Name, "params", params.String(), "result", result.Result)
if obs != nil {
obs.MakeLastProgressCompletion()
a.observer.Update(*obs)
}
return result, nil
}
@@ -546,7 +468,7 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
chosenAction = *action
reasoning = reason
if params == nil {
p, err := a.generateParameters(job, pickTemplate, chosenAction, conv, reasoning, maxRetries)
p, err := a.generateParameters(job.GetContext(), pickTemplate, chosenAction, conv, reasoning, maxRetries)
if err != nil {
xlog.Error("Error generating parameters, trying again", "error", err)
// try again
@@ -561,7 +483,7 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
job.ResetNextAction()
} else {
var err error
chosenAction, actionParams, reasoning, err = a.pickAction(job, pickTemplate, conv, maxRetries)
chosenAction, actionParams, reasoning, err = a.pickAction(job.GetContext(), pickTemplate, conv, maxRetries)
if err != nil {
xlog.Error("Error picking action", "error", err)
job.Result.Finish(err)
@@ -635,7 +557,7 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
"reasoning", reasoning,
)
params, err := a.generateParameters(job, pickTemplate, chosenAction, conv, reasoning, maxRetries)
params, err := a.generateParameters(job.GetContext(), pickTemplate, chosenAction, conv, reasoning, maxRetries)
if err != nil {
xlog.Error("Error generating parameters, trying again", "error", err)
// try again
@@ -730,7 +652,7 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
}
if !chosenAction.Definition().Name.Is(action.PlanActionName) {
result, err := a.runAction(job, chosenAction, actionParams)
result, err := a.runAction(job.GetContext(), chosenAction, actionParams)
if err != nil {
//job.Result.Finish(fmt.Errorf("error running action: %w", err))
//return
@@ -755,7 +677,7 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
}
// given the result, we can now re-evaluate the conversation
followingAction, followingParams, reasoning, err := a.pickAction(job, reEvaluationTemplate, conv, maxRetries)
followingAction, followingParams, reasoning, err := a.pickAction(job.GetContext(), reEvaluationTemplate, conv, maxRetries)
if err != nil {
job.Result.Conversation = conv
job.Result.Finish(fmt.Errorf("error picking action: %w", err))
@@ -1033,7 +955,3 @@ func (a *Agent) loop(timer *time.Timer, job *types.Job) {
xlog.Debug("Agent is consuming a job", "agent", a.Character.Name, "job", job)
a.consumeJob(job, UserRole)
}
func (a *Agent) Observer() Observer {
return a.observer
}

View File

@@ -226,10 +226,7 @@ var _ = Describe("Agent test", func() {
WithLLMAPIKey(apiKeyURL),
WithTimeout("10m"),
WithActions(
&TestAction{response: map[string]string{
"boston": testActionResult,
"milan": testActionResult2,
}},
actions.NewSearch(map[string]string{}),
),
EnablePlanning,
EnableForceReasoning,
@@ -241,21 +238,18 @@ var _ = Describe("Agent test", func() {
defer agent.Stop()
result := agent.Ask(
types.WithText("Use the plan tool to do two actions in sequence: search for the weather in boston and search for the weather in milan"),
types.WithText("Thoroughly plan a trip to San Francisco from Venice, Italy; check flight times, visa requirements and whether electrical items are allowed in cabin luggage."),
)
Expect(len(result.State)).To(BeNumerically(">", 1))
actionsExecuted := []string{}
actionResults := []string{}
for _, r := range result.State {
xlog.Info(r.Result)
actionsExecuted = append(actionsExecuted, r.Action.Definition().Name.String())
actionResults = append(actionResults, r.ActionResult.Result)
}
Expect(actionsExecuted).To(ContainElement("get_weather"), fmt.Sprint(result))
Expect(actionsExecuted).To(ContainElement("search_internet"), fmt.Sprint(result))
Expect(actionsExecuted).To(ContainElement("plan"), fmt.Sprint(result))
Expect(actionResults).To(ContainElement(testActionResult), fmt.Sprint(result))
Expect(actionResults).To(ContainElement(testActionResult2), fmt.Sprint(result))
})
It("Can initiate conversations", func() {

View File

@@ -1,87 +0,0 @@
package agent
import (
"encoding/json"
"sync"
"sync/atomic"
"github.com/mudler/LocalAGI/core/sse"
"github.com/mudler/LocalAGI/core/types"
"github.com/mudler/LocalAGI/pkg/xlog"
)
type Observer interface {
NewObservable() *types.Observable
Update(types.Observable)
History() []types.Observable
}
type SSEObserver struct {
agent string
maxID int32
manager sse.Manager
mutex sync.Mutex
history []types.Observable
historyLast int
}
func NewSSEObserver(agent string, manager sse.Manager) *SSEObserver {
return &SSEObserver{
agent: agent,
maxID: 1,
manager: manager,
history: make([]types.Observable, 100),
}
}
func (s *SSEObserver) NewObservable() *types.Observable {
id := atomic.AddInt32(&s.maxID, 1)
return &types.Observable{
ID: id - 1,
Agent: s.agent,
}
}
func (s *SSEObserver) Update(obs types.Observable) {
data, err := json.Marshal(obs)
if err != nil {
xlog.Error("Error marshaling observable", "error", err)
return
}
msg := sse.NewMessage(string(data)).WithEvent("observable_update")
s.manager.Send(msg)
s.mutex.Lock()
defer s.mutex.Unlock()
for i, o := range s.history {
if o.ID == obs.ID {
s.history[i] = obs
return
}
}
s.history[s.historyLast] = obs
s.historyLast += 1
if s.historyLast >= len(s.history) {
s.historyLast = 0
}
}
func (s *SSEObserver) History() []types.Observable {
h := make([]types.Observable, 0, 20)
s.mutex.Lock()
defer s.mutex.Unlock()
for _, obs := range s.history {
if obs.ID == 0 {
continue
}
h = append(h, obs)
}
return h
}
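
The removed SSEObserver keeps its history in a fixed-size slice with a wrapping write index. A self-contained sketch of just that ring-buffer behaviour, with a stand-in Observable type:

```go
// Sketch of the fixed-size history buffer the removed SSEObserver used:
// updates to a known ID are rewritten in place, and new entries overwrite
// the oldest slot once the buffer is full. Observable is a stand-in.
package main

import "fmt"

type Observable struct{ ID int32 }

type ring struct {
	history []Observable
	last    int
}

func newRing(size int) *ring { return &ring{history: make([]Observable, size)} }

func (r *ring) Update(obs Observable) {
	for i, o := range r.history {
		if o.ID == obs.ID {
			r.history[i] = obs // in-place update, as in SSEObserver.Update
			return
		}
	}
	r.history[r.last] = obs
	r.last++
	if r.last >= len(r.history) {
		r.last = 0 // wrap around and start overwriting the oldest entries
	}
}

func (r *ring) History() []Observable {
	out := make([]Observable, 0, len(r.history))
	for _, o := range r.history {
		if o.ID == 0 { // empty slot; IDs start at 1
			continue
		}
		out = append(out, o)
	}
	return out
}

func main() {
	r := newRing(3)
	for id := int32(1); id <= 5; id++ {
		r.Update(Observable{ID: id})
	}
	fmt.Println(r.History()) // oldest entries 1 and 2 have been overwritten
}
```

Known IDs are rewritten in place, so repeated Update calls for the same observable never consume extra slots; only genuinely new observables advance the write index.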

View File

@@ -53,8 +53,6 @@ type options struct {
mcpServers []MCPServer
newConversationsSubscribers []func(openai.ChatCompletionMessage)
observer Observer
}
func (o *options) SeparatedMultimodalModel() bool {
@@ -338,10 +336,3 @@ func WithActions(actions ...types.Action) Option {
return nil
}
}
func WithObserver(observer Observer) Option {
return func(o *options) error {
o.observer = observer
return nil
}
}

View File

@@ -6,7 +6,7 @@ import (
"os"
"path/filepath"
"github.com/mudler/LocalAGI/core/types"
"github.com/mudler/LocalAGI/core/action"
"github.com/sashabaranov/go-openai/jsonschema"
)
@@ -15,7 +15,7 @@ import (
// in the prompts
type PromptHUD struct {
Character Character `json:"character"`
CurrentState types.AgentInternalState `json:"current_state"`
CurrentState action.AgentInternalState `json:"current_state"`
PermanentGoal string `json:"permanent_goal"`
ShowCharacter bool `json:"show_character"`
}
@@ -80,7 +80,7 @@ func Load(path string) (*Character, error) {
return &c, nil
}
func (a *Agent) State() types.AgentInternalState {
func (a *Agent) State() action.AgentInternalState {
return *a.currentState
}

View File

@@ -407,7 +407,6 @@ func (a *AgentPool) startAgentWithConfig(name string, config *AgentConfig) error
c.AgentResultCallback()(state)
}
}),
WithObserver(NewSSEObserver(name, manager)),
}
if config.HUD {

View File

@@ -27,8 +27,6 @@ type Job struct {
context context.Context
cancel context.CancelFunc
Obs *Observable
}
type ActionRequest struct {
@@ -200,9 +198,3 @@ func (j *Job) Cancel() {
func (j *Job) GetContext() context.Context {
return j.context
}
func WithObservable(obs *Observable) JobOption {
return func(j *Job) {
j.Obs = obs
}
}

View File

@@ -1,61 +0,0 @@
package types
import (
"github.com/mudler/LocalAGI/pkg/xlog"
"github.com/sashabaranov/go-openai"
)
type Creation struct {
ChatCompletionRequest *openai.ChatCompletionRequest `json:"chat_completion_request,omitempty"`
FunctionDefinition *openai.FunctionDefinition `json:"function_definition,omitempty"`
FunctionParams ActionParams `json:"function_params,omitempty"`
}
type Progress struct {
Error string `json:"error,omitempty"`
ChatCompletionResponse *openai.ChatCompletionResponse `json:"chat_completion_response,omitempty"`
ActionResult string `json:"action_result,omitempty"`
AgentState *AgentInternalState `json:"agent_state"`
}
type Completion struct {
Error string `json:"error,omitempty"`
ChatCompletionResponse *openai.ChatCompletionResponse `json:"chat_completion_response,omitempty"`
Conversation []openai.ChatCompletionMessage `json:"conversation,omitempty"`
ActionResult string `json:"action_result,omitempty"`
AgentState *AgentInternalState `json:"agent_state"`
}
type Observable struct {
ID int32 `json:"id"`
ParentID int32 `json:"parent_id,omitempty"`
Agent string `json:"agent"`
Name string `json:"name"`
Icon string `json:"icon"`
Creation *Creation `json:"creation,omitempty"`
Progress []Progress `json:"progress,omitempty"`
Completion *Completion `json:"completion,omitempty"`
}
func (o *Observable) AddProgress(p Progress) {
if o.Progress == nil {
o.Progress = make([]Progress, 0)
}
o.Progress = append(o.Progress, p)
}
func (o *Observable) MakeLastProgressCompletion() {
if len(o.Progress) == 0 {
xlog.Error("Observable completed without any progress", "id", o.ID, "name", o.Name)
return
}
p := o.Progress[len(o.Progress)-1]
o.Progress = o.Progress[:len(o.Progress)-1]
o.Completion = &Completion{
Error: p.Error,
ChatCompletionResponse: p.ChatCompletionResponse,
ActionResult: p.ActionResult,
AgentState: p.AgentState,
}
}

View File

@@ -1,41 +0,0 @@
package types
import "fmt"
// State is the structure
// that is used to keep track of the current state
// and the Agent's short memory that it can update
// Besides a long term memory that is accessible by the agent (With vector database),
// And a context memory (that is always powered by a vector database),
// this memory is the shorter one that the LLM keeps across conversation and across its
// reasoning process's and life time.
// TODO: A special action is then used to let the LLM itself update its memory
// periodically during self-processing, and the same action is ALSO exposed
// during the conversation to let the user put for example, a new goal to the agent.
type AgentInternalState struct {
NowDoing string `json:"doing_now"`
DoingNext string `json:"doing_next"`
DoneHistory []string `json:"done_history"`
Memories []string `json:"memories"`
Goal string `json:"goal"`
}
const fmtT = `=====================
NowDoing: %s
DoingNext: %s
Your current goal is: %s
You have done: %+v
You have a short memory with: %+v
=====================
`
func (c AgentInternalState) String() string {
return fmt.Sprintf(
fmtT,
c.NowDoing,
c.DoingNext,
c.Goal,
c.DoneHistory,
c.Memories,
)
}

View File

@@ -7,7 +7,7 @@ services:
# Image list (dockerhub): https://hub.docker.com/r/localai/localai
image: localai/localai:master-ffmpeg-core
command:
- ${MODEL_NAME:-gemma-3-12b-it-qat}
- ${MODEL_NAME:-arcee-agent}
- ${MULTIMODAL_MODEL:-minicpm-v-2_6}
- ${IMAGE_MODEL:-sd-1.5-ggml}
- granite-embedding-107m-multilingual
@@ -59,7 +59,7 @@ services:
- 8080:3000
#image: quay.io/mudler/localagi:master
environment:
- LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-12b-it-qat}
- LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
- LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
- LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
- LOCALAGI_LLM_API_URL=http://localai:8080

View File

@@ -22,7 +22,6 @@ var withLogs = os.Getenv("LOCALAGI_ENABLE_CONVERSATIONS_LOGGING") == "true"
var apiKeysEnv = os.Getenv("LOCALAGI_API_KEYS")
var imageModel = os.Getenv("LOCALAGI_IMAGE_MODEL")
var conversationDuration = os.Getenv("LOCALAGI_CONVERSATION_DURATION")
var localOperatorBaseURL = os.Getenv("LOCALOPERATOR_BASE_URL")
func init() {
if baseModel == "" {
@@ -62,9 +61,7 @@ func main() {
apiKey,
stateDir,
localRAG,
services.Actions(map[string]string{
"browser-agent-runner-base-url": localOperatorBaseURL,
}),
services.Actions,
services.Connectors,
services.DynamicPrompts,
timeout,

View File

@@ -1,70 +0,0 @@
package api
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
)
// Client represents a client for interacting with the LocalOperator API
type Client struct {
baseURL string
httpClient *http.Client
}
// NewClient creates a new API client
func NewClient(baseURL string) *Client {
return &Client{
baseURL: baseURL,
httpClient: &http.Client{},
}
}
// AgentRequest represents the request body for running an agent
type AgentRequest struct {
Goal string `json:"goal"`
MaxAttempts int `json:"max_attempts,omitempty"`
MaxNoActionAttempts int `json:"max_no_action_attempts,omitempty"`
}
// StateDescription represents a single state in the agent's history
type StateDescription struct {
CurrentURL string `json:"current_url"`
PageTitle string `json:"page_title"`
PageContentDescription string `json:"page_content_description"`
}
// StateHistory represents the complete history of states during agent execution
type StateHistory struct {
States []StateDescription `json:"states"`
}
// RunAgent sends a request to run an agent with the given goal
func (c *Client) RunBrowserAgent(req AgentRequest) (*StateHistory, error) {
body, err := json.Marshal(req)
if err != nil {
return nil, fmt.Errorf("failed to marshal request: %w", err)
}
resp, err := c.httpClient.Post(
fmt.Sprintf("%s/api/browser/run", c.baseURL),
"application/json",
bytes.NewBuffer(body),
)
if err != nil {
return nil, fmt.Errorf("failed to send request: %w", err)
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
}
var state StateHistory
if err := json.NewDecoder(resp.Body).Decode(&state); err != nil {
return nil, fmt.Errorf("failed to decode response: %w", err)
}
return &state, nil
}
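
For reference, here is a minimal sketch of the HTTP exchange this removed client performed against the LocalOperator API. The endpoint path and JSON field names are taken from the deleted code; the base URL and goal are placeholders.

```go
// Minimal sketch of the request/response shape the removed LocalOperator
// client used. Running it requires a LocalOperator instance at the given URL.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type AgentRequest struct {
	Goal                string `json:"goal"`
	MaxAttempts         int    `json:"max_attempts,omitempty"`
	MaxNoActionAttempts int    `json:"max_no_action_attempts,omitempty"`
}

type StateDescription struct {
	CurrentURL             string `json:"current_url"`
	PageTitle              string `json:"page_title"`
	PageContentDescription string `json:"page_content_description"`
}

type StateHistory struct {
	States []StateDescription `json:"states"`
}

func runBrowserAgent(baseURL string, req AgentRequest) (*StateHistory, error) {
	body, err := json.Marshal(req)
	if err != nil {
		return nil, err
	}
	resp, err := http.Post(baseURL+"/api/browser/run", "application/json", bytes.NewReader(body))
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return nil, fmt.Errorf("unexpected status code: %d", resp.StatusCode)
	}
	var history StateHistory
	if err := json.NewDecoder(resp.Body).Decode(&history); err != nil {
		return nil, err
	}
	return &history, nil
}

func main() {
	// Placeholder URL; LOCALOPERATOR_BASE_URL used to supply this value.
	history, err := runBrowserAgent("http://localhost:9090", AgentRequest{Goal: "open example.com and describe the page"})
	fmt.Println(history, err)
}
```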

View File

@@ -18,7 +18,6 @@ const (
// Actions
ActionSearch = "search"
ActionCustom = "custom"
ActionBrowserAgentRunner = "browser-agent-runner"
ActionGithubIssueLabeler = "github-issue-labeler"
ActionGithubIssueOpener = "github-issue-opener"
ActionGithubIssueCloser = "github-issue-closer"
@@ -53,7 +52,6 @@ var AvailableActions = []string{
ActionGithubIssueSearcher,
ActionGithubRepositoryGet,
ActionGithubGetAllContent,
ActionBrowserAgentRunner,
ActionGithubRepositoryCreateOrUpdate,
ActionGithubIssueReader,
ActionGithubIssueCommenter,
@@ -73,34 +71,31 @@ var AvailableActions = []string{
ActionShellcommand,
}
func Actions(actionsConfigs map[string]string) func(a *state.AgentConfig) func(ctx context.Context, pool *state.AgentPool) []types.Action {
return func(a *state.AgentConfig) func(ctx context.Context, pool *state.AgentPool) []types.Action {
return func(ctx context.Context, pool *state.AgentPool) []types.Action {
allActions := []types.Action{}
func Actions(a *state.AgentConfig) func(ctx context.Context, pool *state.AgentPool) []types.Action {
return func(ctx context.Context, pool *state.AgentPool) []types.Action {
allActions := []types.Action{}
agentName := a.Name
agentName := a.Name
for _, a := range a.Actions {
var config map[string]string
if err := json.Unmarshal([]byte(a.Config), &config); err != nil {
xlog.Error("Error unmarshalling action config", "error", err)
continue
}
a, err := Action(a.Name, agentName, config, pool, actionsConfigs)
if err != nil {
continue
}
allActions = append(allActions, a)
for _, a := range a.Actions {
var config map[string]string
if err := json.Unmarshal([]byte(a.Config), &config); err != nil {
xlog.Error("Error unmarshalling action config", "error", err)
continue
}
return allActions
a, err := Action(a.Name, agentName, config, pool)
if err != nil {
continue
}
allActions = append(allActions, a)
}
}
return allActions
}
}
func Action(name, agentName string, config map[string]string, pool *state.AgentPool, actionsConfigs map[string]string) (types.Action, error) {
func Action(name, agentName string, config map[string]string, pool *state.AgentPool) (types.Action, error) {
var a types.Action
var err error
@@ -119,8 +114,6 @@ func Action(name, agentName string, config map[string]string, pool *state.AgentP
a = actions.NewGithubIssueCloser(config)
case ActionGithubIssueSearcher:
a = actions.NewGithubIssueSearch(config)
case ActionBrowserAgentRunner:
a = actions.NewBrowserAgentRunner(config, actionsConfigs["browser-agent-runner-base-url"])
case ActionGithubIssueReader:
a = actions.NewGithubIssueReader(config)
case ActionGithubPRReader:
@@ -176,11 +169,6 @@ func ActionsConfigMeta() []config.FieldGroup {
Label: "Search",
Fields: actions.SearchConfigMeta(),
},
{
Name: "browser-agent-runner",
Label: "Browser Agent Runner",
Fields: actions.BrowserAgentRunnerConfigMeta(),
},
{
Name: "generate_image",
Label: "Generate Image",

View File

@@ -1,116 +0,0 @@
package actions
import (
"context"
"fmt"
"github.com/mudler/LocalAGI/core/types"
"github.com/mudler/LocalAGI/pkg/config"
api "github.com/mudler/LocalAGI/pkg/localoperator"
"github.com/sashabaranov/go-openai/jsonschema"
)
type BrowserAgentRunner struct {
baseURL, customActionName string
client *api.Client
}
func NewBrowserAgentRunner(config map[string]string, defaultURL string) *BrowserAgentRunner {
if config["baseURL"] == "" {
config["baseURL"] = defaultURL
}
client := api.NewClient(config["baseURL"])
return &BrowserAgentRunner{
client: client,
baseURL: config["baseURL"],
customActionName: config["customActionName"],
}
}
func (b *BrowserAgentRunner) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
result := api.AgentRequest{}
err := params.Unmarshal(&result)
if err != nil {
return types.ActionResult{}, fmt.Errorf("failed to unmarshal params: %w", err)
}
req := api.AgentRequest{
Goal: result.Goal,
MaxAttempts: result.MaxAttempts,
MaxNoActionAttempts: result.MaxNoActionAttempts,
}
stateHistory, err := b.client.RunBrowserAgent(req)
if err != nil {
return types.ActionResult{}, fmt.Errorf("failed to run browser agent: %w", err)
}
// Format the state history into a readable string
var historyStr string
// for i, state := range stateHistory.States {
// historyStr += fmt.Sprintf("State %d:\n", i+1)
// historyStr += fmt.Sprintf(" URL: %s\n", state.CurrentURL)
// historyStr += fmt.Sprintf(" Title: %s\n", state.PageTitle)
// historyStr += fmt.Sprintf(" Description: %s\n\n", state.PageContentDescription)
// }
historyStr += fmt.Sprintf(" URL: %s\n", stateHistory.States[len(stateHistory.States)-1].CurrentURL)
historyStr += fmt.Sprintf(" Title: %s\n", stateHistory.States[len(stateHistory.States)-1].PageTitle)
historyStr += fmt.Sprintf(" Description: %s\n\n", stateHistory.States[len(stateHistory.States)-1].PageContentDescription)
return types.ActionResult{
Result: fmt.Sprintf("Browser agent completed successfully. History:\n%s", historyStr),
}, nil
}
func (b *BrowserAgentRunner) Definition() types.ActionDefinition {
actionName := "run_browser_agent"
if b.customActionName != "" {
actionName = b.customActionName
}
description := "Run a browser agent to achieve a specific goal, for example: 'Go to https://www.google.com and search for 'LocalAI', and tell me what's on the first page'"
return types.ActionDefinition{
Name: types.ActionDefinitionName(actionName),
Description: description,
Properties: map[string]jsonschema.Definition{
"goal": {
Type: jsonschema.String,
Description: "The goal for the browser agent to achieve",
},
"max_attempts": {
Type: jsonschema.Number,
Description: "Maximum number of attempts the agent can make (optional)",
},
"max_no_action_attempts": {
Type: jsonschema.Number,
Description: "Maximum number of attempts without taking an action (optional)",
},
},
Required: []string{"goal"},
}
}
func (a *BrowserAgentRunner) Plannable() bool {
return true
}
// BrowserAgentRunnerConfigMeta returns the metadata for Browser Agent Runner action configuration fields
func BrowserAgentRunnerConfigMeta() []config.Field {
return []config.Field{
{
Name: "baseURL",
Label: "Base URL",
Type: config.FieldTypeText,
Required: false,
HelpText: "Base URL of the LocalOperator API",
},
{
Name: "customActionName",
Label: "Custom Action Name",
Type: config.FieldTypeText,
HelpText: "Custom name for this action",
},
}
}

View File

@@ -370,7 +370,7 @@ func (a *App) Chat(pool *state.AgentPool) func(c *fiber.Ctx) error {
xlog.Error("Error marshaling status message", "error", err)
} else {
manager.Send(
sse.NewMessage(string(statusData)).WithEvent("json_message_status"))
sse.NewMessage(string(statusData)).WithEvent("json_status"))
}
// Process the message asynchronously
@@ -417,7 +417,7 @@ func (a *App) Chat(pool *state.AgentPool) func(c *fiber.Ctx) error {
xlog.Error("Error marshaling completed status", "error", err)
} else {
manager.Send(
sse.NewMessage(string(completedData)).WithEvent("json_message_status"))
sse.NewMessage(string(completedData)).WithEvent("json_status"))
}
}()
@@ -444,7 +444,7 @@ func (a *App) ExecuteAction(pool *state.AgentPool) func(c *fiber.Ctx) error {
actionName := c.Params("name")
xlog.Debug("Executing action", "action", actionName, "config", payload.Config, "params", payload.Params)
a, err := services.Action(actionName, "", payload.Config, pool, map[string]string{})
a, err := services.Action(actionName, "", payload.Config, pool)
if err != nil {
xlog.Error("Error creating action", "error", err)
return errorJSONMessage(c, err.Error())

View File

@@ -4,7 +4,6 @@
"": {
"name": "react-ui",
"dependencies": {
"highlight.js": "^11.11.1",
"react": "^19.1.0",
"react-dom": "^19.1.0",
},
@@ -301,8 +300,6 @@
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
"highlight.js": ["highlight.js@11.11.1", "", {}, "sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w=="],
"ignore": ["ignore@5.3.2", "", {}, "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="],
"import-fresh": ["import-fresh@3.3.1", "", { "dependencies": { "parent-module": "^1.0.0", "resolve-from": "^4.0.0" } }, "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="],

View File

@@ -11,8 +11,7 @@
},
"dependencies": {
"react": "^19.1.0",
"react-dom": "^19.1.0",
"highlight.js": "^11.11.1"
"react-dom": "^19.1.0"
},
"devDependencies": {
"@eslint/js": "^9.24.0",

View File

@@ -1,17 +1,4 @@
/* Base styles */
pre.hljs {
background-color: var(--medium-bg);
padding: 1rem;
border-radius: 8px;
overflow-x: auto;
font-family: 'JetBrains Mono', monospace;
line-height: 1.5;
}
code.json {
display: block;
}
:root {
--primary: #00ff95;
--secondary: #ff00b1;
@@ -2007,62 +1994,16 @@ select.form-control {
text-decoration: none;
}
.file-button:hover {
background: rgba(0, 255, 149, 0.8);
transform: translateY(-2px);
box-shadow: 0 4px 8px rgba(0, 0, 0, 0.2);
}
.file-button i {
font-size: 16px;
}
.card {
background: var(--medium-bg);
border: 1px solid var(--border);
border-radius: 8px;
padding: 15px;
margin-bottom: 15px;
transition: all 0.3s ease;
box-shadow: 0 2px 10px rgba(0, 0, 0, 0.1);
cursor: pointer;
}
.card:hover {
transform: translateY(-2px);
box-shadow: 0 5px 15px rgba(0, 0, 0, 0.2);
background: var(--light-bg);
}
.spinner {
width: 16px;
height: 16px;
border: 2px solid var(--primary);
border-radius: 50%;
border-top-color: transparent;
animation: spin 1s linear infinite;
}
@keyframes spin {
0% { transform: rotate(0deg); }
100% { transform: rotate(360deg); }
}
.expand-button {
background: none;
border: none;
color: var(--primary);
cursor: pointer;
font-size: 1.2em;
padding: 5px;
margin-left: 10px;
transition: all 0.3s ease;
}
.expand-button:hover {
color: var(--success);
transform: scale(1.1);
}
.expand-button:focus {
outline: none;
box-shadow: 0 0 0 2px var(--primary);
}
.selected-file-info {
margin-top: 20px;
padding: 20px;

View File

@@ -63,8 +63,8 @@ export function useSSE(agentName) {
}
});
// Handle 'json_message_status' event
eventSource.addEventListener('json_message_status', (event) => {
// Handle 'json_status' event
eventSource.addEventListener('json_status', (event) => {
try {
const data = JSON.parse(event.data);
const timestamp = data.timestamp || new Date().toISOString();

View File

@@ -1,22 +1,13 @@
import { useState, useEffect } from 'react';
import { useParams, Link } from 'react-router-dom';
import hljs from 'highlight.js/lib/core';
import json from 'highlight.js/lib/languages/json';
import 'highlight.js/styles/monokai.css';
hljs.registerLanguage('json', json);
function AgentStatus() {
const [showStatus, setShowStatus] = useState(true);
const { name } = useParams();
const [statusData, setStatusData] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
const [_eventSource, setEventSource] = useState(null);
// Store all observables by id
const [observableMap, setObservableMap] = useState({});
const [observableTree, setObservableTree] = useState([]);
const [expandedCards, setExpandedCards] = useState(new Map());
const [liveUpdates, setLiveUpdates] = useState([]);
// Update document title
useEffect(() => {
@@ -48,80 +39,17 @@ function AgentStatus() {
fetchStatusData();
// Helper to build observable tree from map
function buildObservableTree(map) {
const nodes = Object.values(map);
const nodeMap = {};
nodes.forEach(node => { nodeMap[node.id] = { ...node, children: [] }; });
const roots = [];
nodes.forEach(node => {
if (!node.parent_id) {
roots.push(nodeMap[node.id]);
} else if (nodeMap[node.parent_id]) {
nodeMap[node.parent_id].children.push(nodeMap[node.id]);
}
});
return roots;
}
// Fetch initial observable history
const fetchObservables = async () => {
try {
const response = await fetch(`/api/agent/${name}/observables`);
if (!response.ok) return;
const data = await response.json();
if (Array.isArray(data.History)) {
const map = {};
data.History.forEach(obs => {
map[obs.id] = obs;
});
setObservableMap(map);
setObservableTree(buildObservableTree(map));
}
} catch (err) {
// Ignore errors for now
}
};
fetchObservables();
// Setup SSE connection for live updates
const sse = new EventSource(`/sse/${name}`);
setEventSource(sse);
sse.addEventListener('observable_update', (event) => {
const data = JSON.parse(event.data);
console.log(data);
setObservableMap(prevMap => {
const prev = prevMap[data.id] || {};
const updated = {
...prev,
...data,
creation: data.creation,
progress: data.progress,
completion: data.completion,
// children are always built client-side
};
const newMap = { ...prevMap, [data.id]: updated };
setObservableTree(buildObservableTree(newMap));
return newMap;
});
});
// Listen for status events and append to statusData.History
sse.addEventListener('status', (event) => {
const status = event.data;
setStatusData(prev => {
// If prev is null, start a new object
if (!prev || typeof prev !== 'object') {
return { History: [status] };
}
// If History not present, add it
if (!Array.isArray(prev.History)) {
return { ...prev, History: [status] };
}
// Otherwise, append
return { ...prev, History: [...prev.History, status] };
});
try {
const data = JSON.parse(event.data);
setLiveUpdates(prev => [data, ...prev.slice(0, 19)]); // Keep last 20 updates
} catch (err) {
setLiveUpdates(prev => [event.data, ...prev.slice(0, 19)]);
}
});
sse.onerror = (err) => {
@@ -155,8 +83,8 @@ function AgentStatus() {
if (loading) {
return (
<div>
<div></div>
<div className="loading-container">
<div className="loader"></div>
<p>Loading agent status...</p>
</div>
);
@@ -164,199 +92,56 @@ function AgentStatus() {
if (error) {
return (
<div>
<div className="error-container">
<h2>Error</h2>
<p>{error}</p>
<Link to="/agents">
<Link to="/agents" className="back-btn">
<i className="fas fa-arrow-left"></i> Back to Agents
</Link>
</div>
);
}
return (
<div>
<h1>Agent Status: {name}</h1>
<div style={{ color: '#aaa', fontSize: 16, marginBottom: 18 }}>
See what the agent is doing and thinking
</div>
{error && (
<div>
{error}
</div>
)}
{loading && <div>Loading...</div>}
{statusData && (
<div>
<div>
<div style={{ display: 'flex', alignItems: 'center', cursor: 'pointer', userSelect: 'none' }}
onClick={() => setShowStatus(prev => !prev)}>
<h2 style={{ margin: 0 }}>Current Status</h2>
<i
className={`fas fa-chevron-${showStatus ? 'up' : 'down'}`}
style={{ color: 'var(--primary)', marginLeft: 12 }}
title={showStatus ? 'Collapse' : 'Expand'}
/>
</div>
<div style={{ color: '#aaa', fontSize: 14, margin: '5px 0 10px 2px' }}>
Summary of the agent's thoughts and actions
</div>
{showStatus && (
<div style={{ marginTop: 10 }}>
{(Array.isArray(statusData?.History) && statusData.History.length === 0) && (
<div style={{ color: '#aaa' }}>No status history available.</div>
)}
{Array.isArray(statusData?.History) && statusData.History.map((item, idx) => (
<div key={idx} style={{
background: '#222',
border: '1px solid #444',
borderRadius: 8,
padding: '12px 16px',
marginBottom: 10,
whiteSpace: 'pre-line',
fontFamily: 'inherit',
fontSize: 15,
color: '#eee',
}}>
{/* Replace <br> tags with newlines, then render as pre-line */}
{typeof item === 'string'
? item.replace(/<br\s*\/?>/gi, '\n')
: JSON.stringify(item)}
</div>
))}
</div>
)}
</div>
{observableTree.length > 0 && (
<div>
<h2>Observable Updates</h2>
<div style={{ color: '#aaa', fontSize: 14, margin: '5px 0 10px 2px' }}>
Drill down into what the agent is doing and thinking when activated by a connector
</div>
<div>
{observableTree.map((container, idx) => (
<div key={container.id || idx} className='card' style={{ marginBottom: '1em' }}>
<div>
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', cursor: 'pointer' }}
onClick={() => {
const newExpanded = !expandedCards.get(container.id);
setExpandedCards(new Map(expandedCards).set(container.id, newExpanded));
}}
>
<div style={{ display: 'flex', gap: '10px', alignItems: 'center' }}>
<i className={`fas fa-${container.icon || 'robot'}`} style={{ verticalAlign: '-0.125em' }}></i>
<span>
<span className='stat-label'>{container.name}</span>#<span className='stat-label'>{container.id}</span>
</span>
</div>
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
<i
className={`fas fa-chevron-${expandedCards.get(container.id) ? 'up' : 'down'}`}
style={{ color: 'var(--primary)' }}
title='Toggle details'
/>
{!container.completion && (
<div className='spinner' />
)}
</div>
</div>
<div style={{ display: expandedCards.get(container.id) ? 'block' : 'none' }}>
{container.children && container.children.length > 0 && (
// Combine live updates with history
const allUpdates = [...liveUpdates, ...(statusData?.History || [])];
<div style={{ marginLeft: '2em', marginTop: '1em' }}>
<h4>Nested Observables</h4>
{container.children.map(child => {
const childKey = `child-${child.id}`;
const isExpanded = expandedCards.get(childKey);
return (
<div key={`${container.id}-child-${child.id}`} className='card' style={{ background: '#222', marginBottom: '0.5em' }}>
<div style={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center', cursor: 'pointer' }}
onClick={() => {
const newExpanded = !expandedCards.get(childKey);
setExpandedCards(new Map(expandedCards).set(childKey, newExpanded));
}}
>
<div style={{ display: 'flex', gap: '10px', alignItems: 'center' }}>
<i className={`fas fa-${child.icon || 'robot'}`} style={{ verticalAlign: '-0.125em' }}></i>
<span>
<span className='stat-label'>{child.name}</span>#<span className='stat-label'>{child.id}</span>
</span>
</div>
<div style={{ display: 'flex', alignItems: 'center', gap: '8px' }}>
<i
className={`fas fa-chevron-${isExpanded ? 'up' : 'down'}`}
style={{ color: 'var(--primary)' }}
title='Toggle details'
/>
{!child.completion && (
<div className='spinner' />
)}
</div>
</div>
<div style={{ display: isExpanded ? 'block' : 'none' }}>
{child.creation && (
<div>
<h5>Creation:</h5>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(child.creation || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
{child.progress && child.progress.length > 0 && (
<div>
<h5>Progress:</h5>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(child.progress || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
{child.completion && (
<div>
<h5>Completion:</h5>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(child.completion || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
</div>
</div>
);
})}
</div>
)}
{container.creation && (
<div>
<h4>Creation:</h4>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(container.creation || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
{container.progress && container.progress.length > 0 && (
<div>
<h4>Progress:</h4>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(container.progress || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
{container.completion && (
<div>
<h4>Completion:</h4>
<pre className="hljs"><code>
<div dangerouslySetInnerHTML={{ __html: hljs.highlight(JSON.stringify(container.completion || {}, null, 2), { language: 'json' }).value }}></div>
</code></pre>
</div>
)}
</div>
return (
<div className="agent-status-container">
<header className="page-header">
<div className="header-content">
<h1>
<Link to="/agents" className="back-link">
<i className="fas fa-arrow-left"></i>
</Link>
Agent Status: {name}
</h1>
</div>
</header>
<div className="chat-container bg-gray-800 shadow-lg rounded-lg">
{/* Chat Messages */}
<div className="chat-messages p-4">
{allUpdates.length > 0 ? (
allUpdates.map((item, index) => (
<div key={index} className="status-item mb-4">
<div className="bg-gray-700 p-4 rounded-lg">
<h2 className="text-sm font-semibold mb-2">Agent Action:</h2>
<div className="status-details">
<div className="status-row">
<span className="status-label">{index}</span>
<span className="status-value">{formatValue(item)}</span>
</div>
</div>
))}
</div>
</div>
))
) : (
<div className="no-status-data">
<p>No status data available for this agent.</p>
</div>
)}
</div>
)}
</div>
</div>
);
}

View File

@@ -241,14 +241,13 @@ func (app *App) registerRoutes(pool *state.AgentPool, webapp *fiber.App) {
entries := []string{}
for _, h := range Reverse(history.Results()) {
entries = append(entries, fmt.Sprintf(`Reasoning: %s
Action taken: %+v
Parameters: %+v
Result: %s`,
entries = append(entries, fmt.Sprintf(
"Result: %v Action: %v Params: %v Reasoning: %v",
h.Result,
h.Action.Definition().Name,
h.Params,
h.Reasoning,
h.ActionCurrentState.Action.Definition().Name,
h.ActionCurrentState.Params,
h.Result))
))
}
return c.JSON(fiber.Map{
@@ -257,21 +256,6 @@ func (app *App) registerRoutes(pool *state.AgentPool, webapp *fiber.App) {
})
})
webapp.Get("/api/agent/:name/observables", func(c *fiber.Ctx) error {
name := c.Params("name")
agent := pool.GetAgent(name)
if agent == nil {
return c.Status(fiber.StatusNotFound).JSON(fiber.Map{
"error": "Agent not found",
})
}
return c.JSON(fiber.Map{
"Name": name,
"History": agent.Observer().History(),
})
})
webapp.Post("/settings/import", app.ImportAgent(pool))
webapp.Get("/settings/export/:name", app.ExportAgent(pool))