chore(tests): Mock LLM in tests for PRs
This saves time when testing on CPU, which is the only sensible option on GitHub CI for PRs. For releases, or once a commit is merged, we could use an external runner with a GPU — or simply wait. Signed-off-by: Richard Palethorpe <io@richiejp.com>
This commit is contained in:
@@ -1,13 +1,33 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/mudler/LocalAGI/pkg/xlog"
|
||||
"github.com/sashabaranov/go-openai"
|
||||
)
|
||||
|
||||
func NewClient(APIKey, URL, timeout string) *openai.Client {
|
||||
type LLMClient interface {
|
||||
CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
|
||||
CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error)
|
||||
}
|
||||
|
||||
type realClient struct {
|
||||
*openai.Client
|
||||
}
|
||||
|
||||
func (r *realClient) CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
return r.Client.CreateChatCompletion(ctx, req)
|
||||
}
|
||||
|
||||
func (r *realClient) CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error) {
|
||||
return r.Client.CreateImage(ctx, req)
|
||||
}
|
||||
|
||||
// NewClient returns a real OpenAI client as LLMClient
|
||||
func NewClient(APIKey, URL, timeout string) LLMClient {
|
||||
// Set up OpenAI client
|
||||
if APIKey == "" {
|
||||
//log.Fatal("OPENAI_API_KEY environment variable not set")
|
||||
@@ -18,11 +38,12 @@ func NewClient(APIKey, URL, timeout string) *openai.Client {
|
||||
|
||||
dur, err := time.ParseDuration(timeout)
|
||||
if err != nil {
|
||||
xlog.Error("Failed to parse timeout", "error", err)
|
||||
dur = 150 * time.Second
|
||||
}
|
||||
|
||||
config.HTTPClient = &http.Client{
|
||||
Timeout: dur,
|
||||
}
|
||||
return openai.NewClientWithConfig(config)
|
||||
return &realClient{openai.NewClientWithConfig(config)}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import (
|
||||
"github.com/sashabaranov/go-openai/jsonschema"
|
||||
)
|
||||
|
||||
func GenerateTypedJSONWithGuidance(ctx context.Context, client *openai.Client, guidance, model string, i jsonschema.Definition, dst any) error {
|
||||
func GenerateTypedJSONWithGuidance(ctx context.Context, client LLMClient, guidance, model string, i jsonschema.Definition, dst any) error {
|
||||
return GenerateTypedJSONWithConversation(ctx, client, []openai.ChatCompletionMessage{
|
||||
{
|
||||
Role: "user",
|
||||
@@ -19,7 +19,7 @@ func GenerateTypedJSONWithGuidance(ctx context.Context, client *openai.Client, g
|
||||
}, model, i, dst)
|
||||
}
|
||||
|
||||
func GenerateTypedJSONWithConversation(ctx context.Context, client *openai.Client, conv []openai.ChatCompletionMessage, model string, i jsonschema.Definition, dst any) error {
|
||||
func GenerateTypedJSONWithConversation(ctx context.Context, client LLMClient, conv []openai.ChatCompletionMessage, model string, i jsonschema.Definition, dst any) error {
|
||||
toolName := "json"
|
||||
decision := openai.ChatCompletionRequest{
|
||||
Model: model,
|
||||
|
||||
25
pkg/llm/mock_client.go
Normal file
25
pkg/llm/mock_client.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package llm
|
||||
|
||||
import (
|
||||
"context"
|
||||
"github.com/sashabaranov/go-openai"
|
||||
)
|
||||
|
||||
type MockClient struct {
|
||||
CreateChatCompletionFunc func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
|
||||
CreateImageFunc func(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error)
|
||||
}
|
||||
|
||||
func (m *MockClient) CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
if m.CreateChatCompletionFunc != nil {
|
||||
return m.CreateChatCompletionFunc(ctx, req)
|
||||
}
|
||||
return openai.ChatCompletionResponse{}, nil
|
||||
}
|
||||
|
||||
func (m *MockClient) CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error) {
|
||||
if m.CreateImageFunc != nil {
|
||||
return m.CreateImageFunc(ctx, req)
|
||||
}
|
||||
return openai.ImageResponse{}, nil
|
||||
}
|
||||
Reference in New Issue
Block a user