chore(tests): Mock LLM in tests for PRs
This saves time when testing on CPU, which is the only sensible option on GitHub CI for PRs. For releases, or once the commit is merged, we could use an external runner with a GPU, or simply wait.

Signed-off-by: Richard Palethorpe <io@richiejp.com>
This commit is contained in:
@@ -1,29 +1,57 @@
|
||||
package agent_test
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/mudler/LocalAGI/pkg/llm"
|
||||
"github.com/sashabaranov/go-openai"
|
||||
|
||||
. "github.com/mudler/LocalAGI/core/agent"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
. "github.com/onsi/gomega"
|
||||
|
||||
)
|
||||
|
||||
var _ = Describe("Agent test", func() {
|
||||
Context("identity", func() {
|
||||
var agent *Agent
|
||||
|
||||
BeforeEach(func() {
	// Wait (up to 10m, polling every 10s) for the API at apiURL to come up
	// before running each spec.
	Eventually(func() error {
		// Probe the readiness endpoint.
		resp, err := http.Get(apiURL + "/readyz")
		if err != nil {
			return err
		}
		// Close the body so the transport can reuse the connection;
		// the original leaked it by discarding the response.
		defer resp.Body.Close()
		// A reachable but not-ready server should also keep us polling.
		if resp.StatusCode != http.StatusOK {
			return fmt.Errorf("readyz returned status %d", resp.StatusCode)
		}
		return nil
	}, "10m", "10s").ShouldNot(HaveOccurred())
})
|
||||
// BeforeEach(func() {
|
||||
// Eventually(func() error {
|
||||
// // test apiURL is working and available
|
||||
// _, err := http.Get(apiURL + "/readyz")
|
||||
// return err
|
||||
// }, "10m", "10s").ShouldNot(HaveOccurred())
|
||||
// })
|
||||
|
||||
It("generates all the fields with random data", func() {
|
||||
var llmClient llm.LLMClient
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, testModel)
|
||||
} else {
|
||||
llmClient = &llm.MockClient{
|
||||
CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
return openai.ChatCompletionResponse{
|
||||
Choices: []openai.ChatCompletionChoice{{
|
||||
Message: openai.ChatCompletionMessage{
|
||||
ToolCalls: []openai.ToolCall{{
|
||||
ID: "tool_call_id_1",
|
||||
Type: "function",
|
||||
Function: openai.FunctionCall{
|
||||
Name: "generate_identity",
|
||||
Arguments: `{"name":"John Doe","age":"42","job_occupation":"Engineer","hobbies":["reading","hiking"],"favorites_music_genres":["Jazz"]}`,
|
||||
},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
}
|
||||
var err error
|
||||
agent, err = New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
WithTimeout("10m"),
|
||||
WithRandomIdentity(),
|
||||
@@ -37,14 +65,40 @@ var _ = Describe("Agent test", func() {
|
||||
Expect(agent.Character.MusicTaste).ToNot(BeEmpty())
|
||||
})
|
||||
It("detect an invalid character", func() {
	// Mock the LLM client so identity generation always fails, letting us
	// assert that New surfaces the error.
	mock := &llm.MockClient{
		CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
			return openai.ChatCompletionResponse{}, fmt.Errorf("invalid character")
		},
	}
	var err error
	// NOTE: the previous version first called New(WithRandomIdentity())
	// without the mock — that result was immediately overwritten, and the
	// call would have hit a real LLM endpoint. Removed as dead code.
	agent, err = New(
		WithLLMClient(mock),
		WithRandomIdentity(),
	)
	Expect(err).To(HaveOccurred())
})
|
||||
It("generates all the fields", func() {
|
||||
mock := &llm.MockClient{
|
||||
CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
return openai.ChatCompletionResponse{
|
||||
Choices: []openai.ChatCompletionChoice{{
|
||||
Message: openai.ChatCompletionMessage{
|
||||
ToolCalls: []openai.ToolCall{{
|
||||
ID: "tool_call_id_2",
|
||||
Type: "function",
|
||||
Function: openai.FunctionCall{
|
||||
Name: "generate_identity",
|
||||
Arguments: `{"name":"Gandalf","age":"90","job_occupation":"Wizard","hobbies":["magic","reading"],"favorites_music_genres":["Classical"]}`,
|
||||
},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
var err error
|
||||
|
||||
agent, err := New(
|
||||
WithLLMClient(mock),
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithModel(testModel),
|
||||
WithRandomIdentity("An 90-year old man with a long beard, a wizard, who lives in a tower."),
|
||||
|
||||
Reference in New Issue
Block a user