webui, fixes

This commit is contained in:
mudler
2024-04-05 17:14:53 +02:00
parent 32f4e53242
commit 5f29125bbb
13 changed files with 510 additions and 142 deletions

View File

@@ -1,10 +1,8 @@
package agent
import (
"bytes"
"context"
"fmt"
"html/template"
"github.com/mudler/local-agent-framework/action"
@@ -85,40 +83,30 @@ func (a *Agent) decision(
return &decisionResult{actionParams: params}, nil
}
type Messages []openai.ChatCompletionMessage
func (m Messages) ToOpenAI() []openai.ChatCompletionMessage {
return []openai.ChatCompletionMessage(m)
}
func (m Messages) Exist(content string) bool {
for _, cc := range m {
if cc.Content == content {
return true
}
}
return false
}
func (a *Agent) generateParameters(ctx context.Context, pickTemplate string, act Action, c []openai.ChatCompletionMessage, reasoning string) (*decisionResult, error) {
// prepare the prompt
stateHUD := bytes.NewBuffer([]byte{})
promptTemplate, err := template.New("pickAction").Parse(hudTemplate)
if err != nil {
return nil, err
}
actions := a.systemInternalActions()
// Get all the actions definitions
definitions := []action.ActionDefinition{}
for _, m := range actions {
definitions = append(definitions, m.Definition())
}
var promptHUD *PromptHUD
if a.options.enableHUD {
h := a.prepareHUD()
promptHUD = &h
}
err = promptTemplate.Execute(stateHUD, struct {
HUD *PromptHUD
Actions []action.ActionDefinition
Reasoning string
Messages []openai.ChatCompletionMessage
}{
Actions: definitions,
Reasoning: reasoning,
HUD: promptHUD,
})
stateHUD, err := renderTemplate(pickTemplate, promptHUD, a.systemInternalActions(), reasoning)
if err != nil {
return nil, err
}
@@ -127,18 +115,12 @@ func (a *Agent) generateParameters(ctx context.Context, pickTemplate string, act
// add a message at the top with it
conversation := c
found := false
for _, cc := range c {
if cc.Content == stateHUD.String() {
found = true
break
}
}
if !found && a.options.enableHUD {
if !Messages(c).Exist(stateHUD) && a.options.enableHUD {
conversation = append([]openai.ChatCompletionMessage{
{
Role: "system",
Content: stateHUD.String(),
Content: stateHUD,
},
}, conversation...)
}
@@ -170,113 +152,27 @@ func (a *Agent) prepareHUD() PromptHUD {
}
}
func (a *Agent) prepareConversationParse(templ string, messages []openai.ChatCompletionMessage, reasoning string) ([]openai.ChatCompletionMessage, Actions, error) {
// prepare the prompt
prompt := bytes.NewBuffer([]byte{})
promptTemplate, err := template.New("pickAction").Parse(templ)
if err != nil {
return nil, []Action{}, err
}
actions := a.systemInternalActions()
// Get all the actions definitions
definitions := []action.ActionDefinition{}
for _, m := range actions {
definitions = append(definitions, m.Definition())
}
var promptHUD *PromptHUD
if a.options.enableHUD {
h := a.prepareHUD()
promptHUD = &h
}
err = promptTemplate.Execute(prompt, struct {
HUD *PromptHUD
Actions []action.ActionDefinition
Reasoning string
Messages []openai.ChatCompletionMessage
}{
Actions: definitions,
Reasoning: reasoning,
Messages: messages,
HUD: promptHUD,
})
if err != nil {
return nil, []Action{}, err
}
if a.options.debugMode {
fmt.Println("=== PROMPT START ===", prompt.String(), "=== PROMPT END ===")
}
conversation := []openai.ChatCompletionMessage{}
conversation = append(conversation, openai.ChatCompletionMessage{
Role: "user",
Content: prompt.String(),
})
return conversation, actions, nil
}
// pickAction picks an action based on the conversation
func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.ChatCompletionMessage) (Action, string, error) {
c := messages
// prepare the prompt
prompt := bytes.NewBuffer([]byte{})
promptTemplate, err := template.New("pickAction").Parse(templ)
if err != nil {
return nil, "", err
}
actions := a.systemInternalActions()
// Get all the actions definitions
definitions := []action.ActionDefinition{}
for _, m := range actions {
definitions = append(definitions, m.Definition())
}
var promptHUD *PromptHUD
if a.options.enableHUD {
h := a.prepareHUD()
promptHUD = &h
}
err = promptTemplate.Execute(prompt, struct {
HUD *PromptHUD
Actions []action.ActionDefinition
Reasoning string
Messages []openai.ChatCompletionMessage
}{
Actions: definitions,
Messages: messages,
HUD: promptHUD,
})
prompt, err := renderTemplate(templ, promptHUD, a.systemInternalActions(), "")
if err != nil {
return nil, "", err
}
// Get the LLM to think on what to do
// and have a thought
found := false
for _, cc := range c {
if cc.Content == prompt.String() {
found = true
break
}
}
if !found {
if !Messages(c).Exist(prompt) {
c = append([]openai.ChatCompletionMessage{
{
Role: "system",
Content: prompt.String(),
Content: prompt,
},
}, c...)
}
@@ -305,7 +201,7 @@ func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.
// From the thought, get the action call
// Get all the available actions IDs
actionsID := []string{}
for _, m := range actions {
for _, m := range a.systemInternalActions() {
actionsID = append(actionsID, m.Definition().Name.String())
}
intentionsTools := action.NewIntention(actionsID...)
@@ -336,7 +232,7 @@ func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.
}
// Find the action
chosenAction := actions.Find(actionChoice.Tool)
chosenAction := a.systemInternalActions().Find(actionChoice.Tool)
if chosenAction == nil {
return nil, "", fmt.Errorf("no action found for intent:" + actionChoice.Tool)
}
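
For orientation, here is a standalone sketch, not part of the commit, of the dedup pattern this refactor converges on: render the prompt once with renderTemplate, then prepend it as a system message only when an identical message is not already in the conversation. The Messages helper is copied from the hunk above; the main function and its sample strings are illustrative.

package main

import (
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

// Messages mirrors the helper type added in this diff.
type Messages []openai.ChatCompletionMessage

// Exist reports whether any message already carries exactly this content.
func (m Messages) Exist(content string) bool {
	for _, cc := range m {
		if cc.Content == content {
			return true
		}
	}
	return false
}

func main() {
	// Stand-in for the string returned by renderTemplate(...).
	prompt := "rendered HUD prompt"
	conversation := Messages{{Role: "user", Content: "Now I want to know the weather in Paris"}}

	// Prepend the system prompt exactly once, as decision.go now does.
	if !conversation.Exist(prompt) {
		conversation = append(Messages{{Role: "system", Content: prompt}}, conversation...)
	}
	fmt.Println(len(conversation)) // 2
}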

View File

@@ -311,6 +311,44 @@ func (a *Agent) consumeJob(job *Job, role string) {
}
}
// If we have already a reply from the action, just return it.
// Otherwise generate a full conversation to get a proper message response
// if chosenAction.Definition().Name.Is(action.ReplyActionName) {
// replyResponse := action.ReplyResponse{}
// if err := params.actionParams.Unmarshal(&replyResponse); err != nil {
// job.Result.Finish(fmt.Errorf("error unmarshalling reply response: %w", err))
// return
// }
// if replyResponse.Message != "" {
// job.Result.SetResponse(replyResponse.Message)
// job.Result.Finish(nil)
// return
// }
// }
// If we have a hud, display it
if a.options.enableHUD {
var promptHUD *PromptHUD
if a.options.enableHUD {
h := a.prepareHUD()
promptHUD = &h
}
prompt, err := renderTemplate(hudTemplate, promptHUD, a.systemInternalActions(), reasoning)
if err != nil {
job.Result.Finish(fmt.Errorf("error renderTemplate: %w", err))
return
}
if !Messages(a.currentConversation).Exist(prompt) {
a.currentConversation = append([]openai.ChatCompletionMessage{
{
Role: "system",
Content: prompt,
},
}, a.currentConversation...)
}
}
// Generate a human-readable response
resp, err := a.client.CreateChatCompletion(ctx,
openai.ChatCompletionRequest{
@@ -413,7 +451,7 @@ func (a *Agent) Run() error {
// Expose a REST API to interact with the agent to ask it things
todoTimer := time.NewTicker(1 * time.Minute)
todoTimer := time.NewTicker(a.options.periodicRuns)
for {
select {
case job := <-a.jobQueue:
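
For context, a standalone sketch, not the agent's actual Run loop, of the ticker-driven select pattern this hunk keeps, with the period now coming from configuration instead of the hard-coded minute; the channel names and timings below are illustrative.

package main

import (
	"fmt"
	"time"
)

func main() {
	// Stands in for a.options.periodicRuns.
	period := 50 * time.Millisecond

	jobQueue := make(chan string, 1)
	jobQueue <- "user job"

	ticker := time.NewTicker(period)
	defer ticker.Stop()
	stop := time.After(200 * time.Millisecond)

	for {
		select {
		case job := <-jobQueue:
			fmt.Println("consume:", job) // user-submitted work is served as it arrives
		case <-ticker.C:
			fmt.Println("periodic run") // the agent wakes up on its own schedule
		case <-stop:
			return
		}
	}
}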

View File

@@ -103,7 +103,7 @@ func (a *FakeInternetAction) Definition() action.ActionDefinition {
var _ = Describe("Agent test", func() {
Context("jobs", func() {
It("pick the correct action", func() {
FIt("pick the correct action", func() {
agent, err := New(
WithLLMAPIURL(apiModel),
WithModel(testModel),
@@ -132,15 +132,18 @@ var _ = Describe("Agent test", func() {
append(debugOptions,
WithText("Now I want to know the weather in Paris"),
)...)
conversation := agent.CurrentConversation()
Expect(len(conversation)).To(Equal(10), fmt.Sprint(conversation))
for _, r := range res.State {
reasons = append(reasons, r.Result)
}
Expect(reasons).ToNot(ContainElement(testActionResult), fmt.Sprint(res))
Expect(reasons).ToNot(ContainElement(testActionResult2), fmt.Sprint(res))
Expect(reasons).To(ContainElement(testActionResult3), fmt.Sprint(res))
// conversation := agent.CurrentConversation()
// for _, r := range res.State {
// reasons = append(reasons, r.Result)
// }
// Expect(len(conversation)).To(Equal(10), fmt.Sprint(conversation))
})
It("pick the correct action", func() {
agent, err := New(
@@ -171,7 +174,7 @@ var _ = Describe("Agent test", func() {
EnableHUD,
DebugMode,
// EnableStandaloneJob,
WithRandomIdentity(),
// WithRandomIdentity(),
WithPermanentGoal("I want to learn to play music"),
)
Expect(err).ToNot(HaveOccurred())
@@ -221,7 +224,7 @@ var _ = Describe("Agent test", func() {
},
},
),
WithRandomIdentity(),
//WithRandomIdentity(),
WithPermanentGoal("get the weather of all the cities in italy and store the results"),
)
Expect(err).ToNot(HaveOccurred())

View File

@@ -3,6 +3,7 @@ package agent
import (
"context"
"strings"
"time"
)
type Option func(*options) error
@@ -24,6 +25,7 @@ type options struct {
statefile string
context context.Context
permanentGoal string
periodicRuns time.Duration
// callbacks
reasoningCallback func(ActionCurrentState) bool
@@ -99,6 +101,17 @@ func WithPermanentGoal(goal string) Option {
}
}
func WithPeriodicRuns(duration string) Option {
return func(o *options) error {
t, err := time.ParseDuration(duration)
if err != nil {
// fall back to a one-minute interval when the duration cannot be parsed
o.periodicRuns, _ = time.ParseDuration("1m")
return nil
}
o.periodicRuns = t
return nil
}
}
func WithContext(ctx context.Context) Option {
return func(o *options) error {
o.context = ctx
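
As a standalone sketch of the fallback semantics WithPeriodicRuns appears to aim for, an invalid duration string should leave the agent with a one-minute default rather than a zero interval; parsePeriod below is an illustrative helper, not part of the framework.

package main

import (
	"fmt"
	"time"
)

// parsePeriod mirrors the intent of WithPeriodicRuns: use the parsed duration,
// falling back to one minute when the string is invalid.
func parsePeriod(s string) time.Duration {
	t, err := time.ParseDuration(s)
	if err != nil {
		return time.Minute
	}
	return t
}

func main() {
	fmt.Println(parsePeriod("10m"))       // 10m0s
	fmt.Println(parsePeriod("not-valid")) // 1m0s, the fallback
}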

View File

@@ -1,11 +1,51 @@
package agent
const hudTemplate = `{{with .HUD }}{{if .ShowCharacter}}You have a character and your replies and actions might be influenced by it.
{{if .Character.Name}}Name: {{.Character.Name}}
{{end}}{{if .Character.Age}}Age: {{.Character.Age}}
{{end}}{{if .Character.Occupation}}Occupation: {{.Character.Occupation}}
{{end}}{{if .Character.Hobbies}}Hobbies: {{.Character.Hobbies}}
{{end}}{{if .Character.MusicTaste}}Music taste: {{.Character.MusicTaste}}
import (
"bytes"
"html/template"
"github.com/mudler/local-agent-framework/action"
"github.com/sashabaranov/go-openai"
)
func renderTemplate(templ string, hud *PromptHUD, actions Actions, reasoning string) (string, error) {
// prepare the prompt
prompt := bytes.NewBuffer([]byte{})
promptTemplate, err := template.New("pickAction").Parse(templ)
if err != nil {
return "", err
}
// Get all the actions definitions
definitions := []action.ActionDefinition{}
for _, m := range actions {
definitions = append(definitions, m.Definition())
}
err = promptTemplate.Execute(prompt, struct {
HUD *PromptHUD
Actions []action.ActionDefinition
Reasoning string
Messages []openai.ChatCompletionMessage
}{
Actions: definitions,
HUD: hud,
Reasoning: reasoning,
})
if err != nil {
return "", err
}
return prompt.String(), nil
}
const hudTemplate = `{{with .HUD }}{{if .ShowCharacter}}The assistant acts like a human, has a character, and its replies and actions might be influenced by it.
{{if .Character.Name}}This is the assistant's name: {{.Character.Name}}
{{end}}{{if .Character.Age}}This is the assistant's age: {{.Character.Age}}
{{end}}{{if .Character.Occupation}}This is the assistant's job: {{.Character.Occupation}}
{{end}}{{if .Character.Hobbies}}These are the assistant's hobbies: {{.Character.Hobbies}}
{{end}}{{if .Character.MusicTaste}}This is the assistant's music taste: {{.Character.MusicTaste}}
{{end}}
{{end}}
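
A standalone sketch of what the HUD portion of this template renders for a populated character; the PromptHUD and Character types below are assumptions inferred from the template fields, not the framework's actual definitions.

package main

import (
	"html/template"
	"os"
)

// Assumed shapes, inferred only from the fields the template references.
type Character struct {
	Name, Age, Occupation, Hobbies, MusicTaste string
}

type PromptHUD struct {
	ShowCharacter bool
	Character     Character
}

const hud = `{{with .HUD}}{{if .ShowCharacter}}The assistant acts like a human, has a character, and its replies and actions might be influenced by it.
{{if .Character.Name}}This is the assistant's name: {{.Character.Name}}
{{end}}{{if .Character.Occupation}}This is the assistant's job: {{.Character.Occupation}}
{{end}}{{end}}{{end}}`

func main() {
	t := template.Must(template.New("hud").Parse(hud))
	// Prints the character block; with a nil HUD nothing would be rendered.
	_ = t.Execute(os.Stdout, struct{ HUD *PromptHUD }{
		HUD: &PromptHUD{ShowCharacter: true, Character: Character{Name: "Ada", Occupation: "librarian"}},
	})
}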