feat(planning): enable agent planning (#68)

* feat(planning): Allow the agent to plan subtasks

Signed-off-by: mudler <mudler@localai.io>

* feat(planning): enable planning toggle in the webui

Signed-off-by: mudler <mudler@localai.io>

* feat(planning): take into consideration the overall goal

Signed-off-by: mudler <mudler@localai.io>

* Update core/action/plan.go

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

---------

Signed-off-by: mudler <mudler@localai.io>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Authored by Ettore Di Giacinto on 2025-03-21 12:28:11 +01:00, committed by GitHub
parent 638eedc2a0
commit 33483ab4b9
37 changed files with 259 additions and 35 deletions


@@ -4,7 +4,6 @@ import (
     "context"
     "fmt"
     "os"
-    "strings"
     "sync"
     "time"
@@ -571,6 +570,11 @@ func (a *Agent) consumeJob(job *Job, role string) {
         return
     }
+    if err := a.handlePlanning(ctx, job, chosenAction, actionParams, reasoning, pickTemplate); err != nil {
+        job.Result.Finish(fmt.Errorf("error running action: %w", err))
+        return
+    }
+
     if !job.Callback(ActionCurrentState{chosenAction, actionParams, reasoning}) {
         job.Result.SetResult(ActionState{ActionCurrentState{chosenAction, actionParams, reasoning}, action.ActionResult{Result: "stopped by callback"}})
         job.Result.Conversation = a.currentConversation
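
The hunk above only wires in the call site; the body of handlePlanning is not part of this file's diff. As a rough, self-contained sketch of the idea behind such a planning step (decompose the overall goal into subtasks, then hand each subtask back to the agent's action loop), the snippet below uses purely hypothetical names (Subtask, Plan, decomposeGoal) that do not come from this repository:

package main

import "fmt"

// Subtask is a hypothetical unit of work produced by the planner.
type Subtask struct {
    Description string
}

// Plan is a hypothetical ordered list of subtasks for an overall goal.
type Plan struct {
    Goal     string
    Subtasks []Subtask
}

// decomposeGoal stands in for the LLM call that would split a goal into
// smaller steps; here it just returns a fixed plan.
func decomposeGoal(goal string) Plan {
    return Plan{
        Goal: goal,
        Subtasks: []Subtask{
            {Description: "gather the information needed for: " + goal},
            {Description: "act on the gathered information"},
            {Description: "summarize the outcome for the user"},
        },
    }
}

func main() {
    plan := decomposeGoal("organize the project changelog")
    for i, st := range plan.Subtasks {
        // Each subtask would normally be dispatched as its own action;
        // here we only print it.
        fmt.Printf("subtask %d/%d: %s\n", i+1, len(plan.Subtasks), st.Description)
    }
}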
@@ -620,27 +624,7 @@ func (a *Agent) consumeJob(job *Job, role string) {
     job.CallbackWithResult(stateResult)
     xlog.Debug("Action executed", "agent", a.Character.Name, "action", chosenAction.Definition().Name, "result", result)
-    // calling the function
-    a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
-        Role: "assistant",
-        ToolCalls: []openai.ToolCall{
-            {
-                Type: openai.ToolTypeFunction,
-                Function: openai.FunctionCall{
-                    Name:      chosenAction.Definition().Name.String(),
-                    Arguments: actionParams.String(),
-                },
-            },
-        },
-    })
-
-    // result of calling the function
-    a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
-        Role:       openai.ChatMessageRoleTool,
-        Content:    result.Result,
-        Name:       chosenAction.Definition().Name.String(),
-        ToolCallID: chosenAction.Definition().Name.String(),
-    })
+    a.addFunctionResultToConversation(chosenAction, actionParams, result)
     //a.currentConversation = append(a.currentConversation, messages...)
     //a.currentConversation = messages
@@ -776,8 +760,7 @@ func (a *Agent) consumeJob(job *Job, role string) {
     }
     // If we didn't got any message, we can use the response from the action
-    if chosenAction.Definition().Name.Is(action.ReplyActionName) && msg.Content == "" ||
-        strings.Contains(msg.Content, "<tool_call>") {
+    if chosenAction.Definition().Name.Is(action.ReplyActionName) && msg.Content == "" {
         xlog.Info("No output returned from conversation, using the action response as a reply " + replyResponse.Message)
         msg = openai.ChatCompletionMessage{
@@ -794,6 +777,30 @@ func (a *Agent) consumeJob(job *Job, role string) {
     job.Result.Finish(nil)
 }
+
+func (a *Agent) addFunctionResultToConversation(chosenAction Action, actionParams action.ActionParams, result action.ActionResult) {
+    // calling the function
+    a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
+        Role: "assistant",
+        ToolCalls: []openai.ToolCall{
+            {
+                Type: openai.ToolTypeFunction,
+                Function: openai.FunctionCall{
+                    Name:      chosenAction.Definition().Name.String(),
+                    Arguments: actionParams.String(),
+                },
+            },
+        },
+    })
+
+    // result of calling the function
+    a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
+        Role:       openai.ChatMessageRoleTool,
+        Content:    result.Result,
+        Name:       chosenAction.Definition().Name.String(),
+        ToolCallID: chosenAction.Definition().Name.String(),
+    })
+}
 // This is running in the background.
 func (a *Agent) periodicallyRun(timer *time.Timer) {
     // Remember always to reset the timer - if we don't the agent will stop..
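
For reference, the two-message pattern that addFunctionResultToConversation encapsulates (an assistant message carrying the tool call, followed by a tool-role message carrying its result) can be reproduced in isolation with the go-openai types. The conversation slice and the hard-coded "search" call below are only an illustration of that pairing, not code from this repository:

package main

import (
    "encoding/json"
    "fmt"

    openai "github.com/sashabaranov/go-openai"
)

func main() {
    var conversation []openai.ChatCompletionMessage

    // Assistant message recording that a tool was invoked and with which arguments.
    conversation = append(conversation, openai.ChatCompletionMessage{
        Role: openai.ChatMessageRoleAssistant,
        ToolCalls: []openai.ToolCall{
            {
                Type: openai.ToolTypeFunction,
                Function: openai.FunctionCall{
                    Name:      "search",
                    Arguments: `{"query":"agent planning"}`,
                },
            },
        },
    })

    // Tool-role message recording the result of that invocation.
    conversation = append(conversation, openai.ChatCompletionMessage{
        Role:       openai.ChatMessageRoleTool,
        Content:    "3 results found",
        Name:       "search",
        ToolCallID: "search",
    })

    out, _ := json.MarshalIndent(conversation, "", "  ")
    fmt.Println(string(out))
}

Keeping both appends in one helper keeps the tool call and its result adjacent in the conversation history, which is what the refactor in this commit achieves.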