feat: track conversations inside connectors (#92)

* switch to observer pattern

Signed-off-by: mudler <mudler@localai.io>

* keep conversation history in Telegram and Slack

* generalize with conversation tracker

---------

Signed-off-by: mudler <mudler@localai.io>
Ettore Di Giacinto authored on 2025-03-25 16:31:03 +01:00, committed by GitHub
parent 53c1554d55
commit d0cfc4c317
8 changed files with 422 additions and 92 deletions
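The gist of the change, before the per-file diffs: connectors no longer drain the agent's conversation channel directly; instead they register callbacks that the agent invokes for every new message (the observer pattern named in the commit message). Below is a minimal sketch of how a connector would hook in. The agent package import path and the main wrapper are assumptions (only the core/types import path is visible in this diff); New, WithNewConversationSubscriber, and openai.ChatCompletionMessage come from the changes that follow.

package main

import (
	"fmt"

	"github.com/mudler/LocalAgent/core/agent" // assumed import path, not shown in this diff
	"github.com/sashabaranov/go-openai"
)

func main() {
	// Register an observer at construction time instead of reading from the
	// (now removed) ConversationChannel().
	a, err := agent.New(
		agent.WithNewConversationSubscriber(func(msg openai.ChatCompletionMessage) {
			// A connector (Telegram, Slack, ...) would record msg in its own
			// conversation history here.
			fmt.Println("new conversation message:", msg.Role, msg.Content)
		}),
	)
	if err != nil {
		panic(err)
	}
	_ = a
}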


@@ -40,6 +40,8 @@ type Agent struct {
 	newConversations chan openai.ChatCompletionMessage
 	mcpActions       types.Actions
+	newMessagesSubscribers []func(openai.ChatCompletionMessage)
 }
 
 type RAGDB interface {
@@ -64,12 +66,13 @@ func New(opts ...Option) (*Agent, error) {
 	ctx, cancel := context.WithCancel(c)
 
 	a := &Agent{
-		jobQueue:     make(chan *types.Job),
-		options:      options,
-		client:       client,
-		Character:    options.character,
-		currentState: &action.AgentInternalState{},
-		context:      types.NewActionContext(ctx, cancel),
+		jobQueue:               make(chan *types.Job),
+		options:                options,
+		client:                 client,
+		Character:              options.character,
+		currentState:           &action.AgentInternalState{},
+		context:                types.NewActionContext(ctx, cancel),
+		newMessagesSubscribers: options.newConversationsSubscribers,
 	}
 
 	if a.options.statefile != "" {
@@ -102,9 +105,27 @@ func New(opts ...Option) (*Agent, error) {
 		"model", a.options.LLMAPI.Model,
 	)
 
+	a.startNewConversationsConsumer()
+
 	return a, nil
 }
 
+func (a *Agent) startNewConversationsConsumer() {
+	go func() {
+		for {
+			select {
+			case <-a.context.Done():
+				return
+			case msg := <-a.newConversations:
+				for _, s := range a.newMessagesSubscribers {
+					s(msg)
+				}
+			}
+		}
+	}()
+}
+
 // StopAction stops the current action
 // if any. Can be called before adding a new job.
 func (a *Agent) StopAction() {
@@ -124,10 +145,6 @@ func (a *Agent) ActionContext() context.Context {
 	return a.actionContext.Context
 }
 
-func (a *Agent) ConversationChannel() chan openai.ChatCompletionMessage {
-	return a.newConversations
-}
-
 // Ask is a pre-emptive, blocking call that returns the response as soon as it's ready.
 // It discards any other computation.
 func (a *Agent) Ask(opts ...types.JobOption) *types.JobResult {
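Design note on the consumer above: the fan-out runs in a single goroutine, so subscriber callbacks are invoked sequentially and should return quickly; replacing the exported ConversationChannel() with callbacks also lets several connectors observe the same stream without competing for reads on one channel. What a connector keeps behind its callback is up to it; the per-chat tracker below is only an illustration (the commit's actual conversation tracker for Telegram and Slack lives in files not shown in this excerpt), so the convTracker name and the keying by chat ID are assumptions.

package connector

import (
	"sync"

	"github.com/sashabaranov/go-openai"
)

// convTracker is an illustrative per-chat history store a connector could
// update from its subscriber callback; not part of this commit's excerpt.
type convTracker struct {
	mu      sync.Mutex
	history map[string][]openai.ChatCompletionMessage // keyed by chat/thread ID
}

func newConvTracker() *convTracker {
	return &convTracker{history: map[string][]openai.ChatCompletionMessage{}}
}

// Add appends a message to one chat's history; safe for concurrent use.
func (t *convTracker) Add(chatID string, msg openai.ChatCompletionMessage) {
	t.mu.Lock()
	defer t.mu.Unlock()
	t.history[chatID] = append(t.history[chatID], msg)
}

// History returns a copy of the recorded messages for a chat.
func (t *convTracker) History(chatID string) []openai.ChatCompletionMessage {
	t.mu.Lock()
	defer t.mu.Unlock()
	return append([]openai.ChatCompletionMessage(nil), t.history[chatID]...)
}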


@@ -19,8 +19,7 @@ func (a *Agent) knowledgeBaseLookup(conv Messages) {
 	// Walk conversation from bottom to top, and find the first message of the user
 	// to use it as a query to the KB
-	var userMessage string
-	userMessage = conv.GetLatestUserMessage().Content
+	userMessage := conv.GetLatestUserMessage().Content
 
 	xlog.Info("[Knowledge Base Lookup] Last user message", "agent", a.Character.Name, "message", userMessage, "lastMessage", conv.GetLatestUserMessage())


@@ -6,6 +6,7 @@ import (
 	"time"
 
 	"github.com/mudler/LocalAgent/core/types"
+	"github.com/sashabaranov/go-openai"
 )
 
 type Option func(*options) error
@@ -49,6 +50,8 @@ type options struct {
 	conversationsPath string
 	mcpServers        []MCPServer
+	newConversationsSubscribers []func(openai.ChatCompletionMessage)
 }
 
 func (o *options) SeparatedMultimodalModel() bool {
@@ -125,6 +128,13 @@ func EnableKnowledgeBaseWithResults(results int) Option {
 	}
 }
 
+func WithNewConversationSubscriber(sub func(openai.ChatCompletionMessage)) Option {
+	return func(o *options) error {
+		o.newConversationsSubscribers = append(o.newConversationsSubscribers, sub)
+		return nil
+	}
+}
+
 var EnableInitiateConversations = func(o *options) error {
 	o.initiateConversations = true
 	return nil
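Because WithNewConversationSubscriber appends to newConversationsSubscribers, each connector can register its own callback and every callback receives every message. A sketch of how a connector package might expose that wiring follows; the telegram package name, the Store interface, and the agent import path are illustrative, while Option and WithNewConversationSubscriber come from the diff above.

package telegram // illustrative connector package, not part of this commit's excerpt

import (
	"github.com/mudler/LocalAgent/core/agent" // assumed import path
	"github.com/sashabaranov/go-openai"
)

// Store is a placeholder for whatever history storage the real connector uses.
type Store interface {
	Append(msg openai.ChatCompletionMessage)
}

// ConversationOption wires the connector's store into the agent as one more
// observer; other connectors can register their own subscribers alongside it.
func ConversationOption(s Store) agent.Option {
	return agent.WithNewConversationSubscriber(func(msg openai.ChatCompletionMessage) {
		s.Append(msg)
	})
}

Registering, say, a Telegram option and a Slack option on the same New call then gives each connector its own history without either one consuming messages away from the other.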