try to fixup tests, enable e2e (#53)
* try to fixup tests, enable e2e Signed-off-by: Ettore Di Giacinto <mudler@localai.io> * Generate JSON character data with tools Signed-off-by: Ettore Di Giacinto <mudler@localai.io> * Rework generation of character Signed-off-by: Ettore Di Giacinto <mudler@localai.io> * Simplify text Signed-off-by: mudler <mudler@localai.io> * Relax some test constraints Signed-off-by: mudler <mudler@localai.io> * Fixups * Properly fit schema generation * Swap default model * ci fixups --------- Signed-off-by: Ettore Di Giacinto <mudler@localai.io> Signed-off-by: mudler <mudler@localai.io>
This commit is contained in:
committed by
GitHub
parent
31b5849d02
commit
e32a569796
128
pkg/client/responses.go
Normal file
128
pkg/client/responses.go
Normal file
@@ -0,0 +1,128 @@
|
||||
package localagent
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// RequestBody represents the message request to the AI model.
//
// Exactly how Input and InputMessages interact is decided by the server;
// within this package, SimpleAIResponse fills Input while ChatAIResponse
// fills InputMessages.
type RequestBody struct {
	// Model is the model (agent) name the request is routed to.
	Model string `json:"model"`
	// Input is a plain-text prompt.
	Input string `json:"input"`
	// InputMessages is a structured chat transcript; omitted from the
	// JSON payload when empty.
	InputMessages []InputMessage `json:"input_messages,omitempty"`
	// Temperature is a pointer so that "unset" (nil) is distinguishable
	// from an explicit 0 and is omitted from the payload when nil.
	Temperature *float64 `json:"temperature,omitempty"`
	// MaxTokens maps to the API's "max_output_tokens" field; nil means
	// the server default.
	MaxTokens *int `json:"max_output_tokens,omitempty"`
}
|
||||
|
||||
// InputMessage represents a user input message.
type InputMessage struct {
	// Role identifies the speaker (e.g. "user" or "assistant" — TODO
	// confirm the full set accepted by the server).
	Role string `json:"role"`
	// Content holds one or more typed content items (text, image URL).
	Content []ContentItem `json:"content"`
}
|
||||
|
||||
// ContentItem represents an item in a content array.
type ContentItem struct {
	// Type discriminates which of the remaining fields is meaningful.
	Type string `json:"type"`
	// Text is the textual payload; omitted from JSON when empty.
	Text string `json:"text,omitempty"`
	// ImageURL is an image reference; omitted from JSON when empty.
	ImageURL string `json:"image_url,omitempty"`
}
|
||||
|
||||
// ResponseBody represents the response from the AI model.
type ResponseBody struct {
	// CreatedAt is a server-supplied timestamp (presumably Unix seconds
	// — TODO confirm against the API).
	CreatedAt int64 `json:"created_at"`
	// Status is the server-reported status of the response.
	Status string `json:"status"`
	// Error is non-nil when the API reports a failure; its concrete
	// shape is server-defined, so it is left as interface{} and only
	// formatted with %v by callers in this file.
	Error interface{} `json:"error,omitempty"`
	// Output is the list of messages produced by the model.
	Output []ResponseMessage `json:"output"`
}
|
||||
|
||||
// ResponseMessage represents a message in the response.
type ResponseMessage struct {
	// Type is the message kind as reported by the server.
	Type string `json:"type"`
	// Status is the per-message status.
	Status string `json:"status"`
	// Role identifies the speaker; helpers in this file look for
	// "assistant" here.
	Role string `json:"role"`
	// Content holds the message's typed content items.
	Content []MessageContentItem `json:"content"`
}
|
||||
|
||||
// MessageContentItem represents a content item in a message.
type MessageContentItem struct {
	// Type discriminates the content kind; helpers in this file look
	// for "output_text" here.
	Type string `json:"type"`
	// Text is the textual payload of the item.
	Text string `json:"text"`
}
|
||||
|
||||
// GetAIResponse sends a request to the AI model and returns the response
|
||||
func (c *Client) GetAIResponse(request *RequestBody) (*ResponseBody, error) {
|
||||
resp, err := c.doRequest(http.MethodPost, "/v1/responses", request)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
var response ResponseBody
|
||||
if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
|
||||
return nil, fmt.Errorf("error decoding response: %w", err)
|
||||
}
|
||||
|
||||
// Check if there was an error in the response
|
||||
if response.Error != nil {
|
||||
return nil, fmt.Errorf("api error: %v", response.Error)
|
||||
}
|
||||
|
||||
return &response, nil
|
||||
}
|
||||
|
||||
// SimpleAIResponse is a helper function to get a simple text response from the AI
|
||||
func (c *Client) SimpleAIResponse(agentName, input string) (string, error) {
|
||||
temperature := 0.7
|
||||
request := &RequestBody{
|
||||
Model: agentName,
|
||||
Input: input,
|
||||
Temperature: &temperature,
|
||||
}
|
||||
|
||||
response, err := c.GetAIResponse(request)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Extract the text response from the output
|
||||
for _, msg := range response.Output {
|
||||
if msg.Role == "assistant" {
|
||||
for _, content := range msg.Content {
|
||||
if content.Type == "output_text" {
|
||||
return content.Text, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("no text response found")
|
||||
}
|
||||
|
||||
// ChatAIResponse sends chat messages to the AI model
|
||||
func (c *Client) ChatAIResponse(agentName string, messages []InputMessage) (string, error) {
|
||||
temperature := 0.7
|
||||
request := &RequestBody{
|
||||
Model: agentName,
|
||||
InputMessages: messages,
|
||||
Temperature: &temperature,
|
||||
}
|
||||
|
||||
response, err := c.GetAIResponse(request)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
// Extract the text response from the output
|
||||
for _, msg := range response.Output {
|
||||
if msg.Role == "assistant" {
|
||||
for _, content := range msg.Content {
|
||||
if content.Type == "output_text" {
|
||||
return content.Text, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return "", fmt.Errorf("no text response found")
|
||||
}
|
||||
Reference in New Issue
Block a user