Fixups to long memory, and avoid re-initing RAGDB recursively

Signed-off-by: mudler <mudler@localai.io>
mudler
2024-12-18 12:41:18 +01:00
parent c2035c95c5
commit 735bab5e32
3 changed files with 16 additions and 14 deletions


@@ -282,8 +282,6 @@ func (a *Agent) consumeJob(job *Job, role string) {
 
 	// We are self evaluating if we consume the job as a system role
 	selfEvaluation := role == SystemRole
-
-	memory := a.options.enableKB && a.options.ragdb != nil
 	a.Lock()
 	// Set the action context
 	ctx, cancel := context.WithCancel(context.Background())
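
The hunk above removes the precomputed memory flag; the RAG branch further down now tests a.options.enableLongTermMemory directly, which also drops the explicit ragdb != nil guard the old expression carried. A minimal sketch of an equivalent check, assuming the option names from this diff and a hypothetical helper name:

	// longTermMemoryReady reports whether the RAG lookup should run.
	// Hypothetical helper; it keeps the nil guard that the removed
	// `memory` flag used to provide alongside the renamed option.
	func (a *Agent) longTermMemoryReady() bool {
		return a.options.enableLongTermMemory && a.options.ragdb != nil
	}
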
@@ -337,18 +335,27 @@ func (a *Agent) consumeJob(job *Job, role string) {
 		}
 	}
 
+	if job.Text != "" {
+		a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
+			Role:    role,
+			Content: job.Text,
+		})
+	}
+
 	// TODO: move to a promptblock?
 	// RAG
-	if memory {
+	if a.options.enableLongTermMemory && len(a.currentConversation) > 0 {
 		// Walk conversation from bottom to top, and find the first message of the user
 		// to use it as a query to the KB
 		var userMessage string
-		for i := len(a.currentConversation) - 1; i >= 0; i-- {
+		for i := len(a.currentConversation); i == 0; i-- {
+			xlog.Info("[Long term memory] Conversation", "role", a.currentConversation[i].Role, "Content", a.currentConversation[i].Content)
 			if a.currentConversation[i].Role == "user" {
 				userMessage = a.currentConversation[i].Content
 				break
 			}
 		}
+		xlog.Info("[Long term memory] User message", "agent", a.Character.Name, "message", userMessage)
 
 		if userMessage != "" {
 			results, err := a.options.ragdb.Search(userMessage, a.options.kbResults)
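
The heart of the lookup is the reverse walk that picks the most recent user message as the KB query. Note that the new loop header in this hunk, for i := len(a.currentConversation); i == 0; i--, never iterates when the conversation is non-empty (its condition fails on entry) and would index out of range when it is empty. A conventional reverse walk, written as a standalone helper; the package name is assumed, and the message type is assumed to come from sashabaranov/go-openai:

	package agent

	import "github.com/sashabaranov/go-openai"

	// latestUserMessage walks the conversation from newest to oldest and
	// returns the content of the first message with the "user" role, or an
	// empty string when there is none (the caller then skips the RAG query).
	func latestUserMessage(conversation []openai.ChatCompletionMessage) string {
		for i := len(conversation) - 1; i >= 0; i-- {
			if conversation[i].Role == "user" {
				return conversation[i].Content
			}
		}
		return ""
	}
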
@@ -380,13 +387,8 @@ func (a *Agent) consumeJob(job *Job, role string) {
 				}}, a.currentConversation...)
 			}
 		}
-	}
-
-	if job.Text != "" {
-		a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
-			Role:    role,
-			Content: job.Text,
-		})
+	} else {
+		xlog.Info("[Long term memory] No conversation available", "agent", a.Character.Name)
 	}
 
 	var pickTemplate string
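
Taken together, the hunks in this file reorder consumeJob so the incoming job text is appended to the conversation before the knowledge-base lookup runs (previously it was appended afterwards, so the lookup could not see it), and any retrieved chunks are prepended as a system message, as the }}, a.currentConversation...) context line indicates. A condensed sketch of the resulting flow; the Search result type, the system-prompt formatting, and the latestUserMessage helper are assumptions, not code from this commit:

	// Record the job text first so the RAG query can see it.
	if job.Text != "" {
		a.currentConversation = append(a.currentConversation, openai.ChatCompletionMessage{
			Role:    role,
			Content: job.Text,
		})
	}

	// Then query the store with the latest user message and prepend the
	// results as a system message.
	if a.options.enableLongTermMemory && len(a.currentConversation) > 0 {
		if userMessage := latestUserMessage(a.currentConversation); userMessage != "" {
			results, err := a.options.ragdb.Search(userMessage, a.options.kbResults)
			if err == nil && len(results) > 0 {
				a.currentConversation = append([]openai.ChatCompletionMessage{{
					Role:    "system",
					Content: "Relevant memories:\n" + strings.Join(results, "\n"), // formatting assumed
				}}, a.currentConversation...)
			}
		}
	}
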


@@ -82,7 +82,7 @@ func main() {
 
 	if len(db.Database) > 0 && kbdisableIndexing != "true" {
 		xlog.Info("Loading knowledgebase from disk, to skip run with KBDISABLEINDEX=true")
-		if err := db.SaveAllToStore(); err != nil {
+		if err := db.PopulateRAGDB(); err != nil {
 			xlog.Info("Error storing in the KB", err)
 		}
 	}
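
At the call site the rename is mechanical: main still only re-indexes the persisted knowledge base when it is non-empty and indexing has not been disabled. A usage sketch, assuming the kbdisableIndexing flag is read from the KBDISABLEINDEX environment variable as the log message suggests:

	// Re-index the persisted knowledge base into the RAG store at startup,
	// unless the user opted out with KBDISABLEINDEX=true.
	kbdisableIndexing := os.Getenv("KBDISABLEINDEX") // assumed source of the flag
	if len(db.Database) > 0 && kbdisableIndexing != "true" {
		xlog.Info("Loading knowledgebase from disk, to skip run with KBDISABLEINDEX=true")
		if err := db.PopulateRAGDB(); err != nil {
			xlog.Info("Error storing in the KB", err)
		}
	}
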


@@ -63,13 +63,13 @@ func NewInMemoryDB(knowledgebase string, store RAGDB) (*InMemoryDatabase, error)
 	}, nil
 }
 
-func (db *InMemoryDatabase) SaveAllToStore() error {
+func (db *InMemoryDatabase) PopulateRAGDB() error {
 	for _, d := range db.Database {
 		if d == "" {
 			// skip empty chunks
 			continue
 		}
-		err := db.Store(d)
+		err := db.RAGDB.Store(d)
 		if err != nil {
 			return fmt.Errorf("error storing in the KB: %w", err)