LocalAGI/pkg/llm/json.go

package llm

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// GenerateJSON asks the model to answer the given text as a JSON object and
// unmarshals the response into i.
func GenerateJSON(ctx context.Context, client *openai.Client, model, text string, i interface{}) error {
	req := openai.ChatCompletionRequest{
		ResponseFormat: &openai.ChatCompletionResponseFormat{Type: openai.ChatCompletionResponseFormatTypeJSONObject},
		Model:          model,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    "user",
				Content: text,
			},
		},
	}

	resp, err := client.CreateChatCompletion(ctx, req)
	if err != nil {
		return fmt.Errorf("failed to generate answer: %v", err)
	}

	if len(resp.Choices) == 0 {
		return fmt.Errorf("no response from OpenAI API")
	}

	return json.Unmarshal([]byte(resp.Choices[0].Message.Content), i)
}

// GenerateJSONFromStruct generates a character as JSON data, using the
// marshalled value of i as an example of the fields the model should fill in,
// then unmarshals the result back into i.
func GenerateJSONFromStruct(ctx context.Context, client *openai.Client, guidance, model string, i interface{}) error {
	// TODO: use functions?
	exampleJSON, err := json.Marshal(i)
	if err != nil {
		return err
	}

	return GenerateJSON(ctx, client, model, "Generate a character as JSON data. "+guidance+". These are the JSON fields that it should contain: "+string(exampleJSON), i)
}

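// Usage sketch (illustrative; not part of the original file): any struct whose
// zero value marshals to the desired field layout can be passed to
// GenerateJSONFromStruct. The struct shape and guidance below are assumptions
// made for the example.
func exampleGenerateJSONFromStruct(ctx context.Context, client *openai.Client, model string) error {
	character := struct {
		Name       string   `json:"name"`
		Age        int      `json:"age"`
		Occupation string   `json:"occupation"`
		Hobbies    []string `json:"hobbies"`
	}{}
	// The marshalled zero value tells the model which fields to populate; the
	// result is unmarshalled back into the same struct.
	return GenerateJSONFromStruct(ctx, client, "The character is a sci-fi engineer", model, &character)
}
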
// GenerateTypedJSON generates a character as JSON data constrained by the given
// JSON schema: the schema is exposed as a single tool named "identity" that the
// request steers the model to call, and the tool-call arguments are unmarshalled
// into dst.
func GenerateTypedJSON(ctx context.Context, client *openai.Client, guidance, model string, i jsonschema.Definition, dst interface{}) error {
	decision := openai.ChatCompletionRequest{
		Model: model,
		Messages: []openai.ChatCompletionMessage{
			{
				Role:    "user",
				Content: "Generate a character as JSON data. " + guidance,
			},
		},
		Tools: []openai.Tool{
			{
				Type: openai.ToolTypeFunction,
				Function: openai.FunctionDefinition{
					Name:       "identity",
					Parameters: i,
				},
			},
		},
		ToolChoice: "identity",
	}

	resp, err := client.CreateChatCompletion(ctx, decision)
	if err != nil {
		return err
	}

	if len(resp.Choices) != 1 {
		return fmt.Errorf("no choices: %d", len(resp.Choices))
	}

	msg := resp.Choices[0].Message
	if len(msg.ToolCalls) == 0 {
		return fmt.Errorf("no tool calls: %d", len(msg.ToolCalls))
	}

	return json.Unmarshal([]byte(msg.ToolCalls[0].Function.Arguments), dst)
}
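
// Usage sketch (illustrative; not part of the original file): GenerateTypedJSON
// constrains the output with an explicit jsonschema.Definition instead of an
// example struct. The schema, guidance, and destination type are assumptions
// made for the example.
func exampleGenerateTypedJSON(ctx context.Context, client *openai.Client, model string) error {
	schema := jsonschema.Definition{
		Type: jsonschema.Object,
		Properties: map[string]jsonschema.Definition{
			"name": {Type: jsonschema.String, Description: "The character's name"},
			"age":  {Type: jsonschema.Integer, Description: "The character's age"},
		},
		Required: []string{"name", "age"},
	}

	var dst struct {
		Name string `json:"name"`
		Age  int    `json:"age"`
	}
	// The tool-call arguments returned by the model are unmarshalled into dst.
	return GenerateTypedJSON(ctx, client, "The character lives in a coastal village", model, schema, &dst)
}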