feat(api): add support for the Responses API (#52)
Signed-off-by: mudler <mudler@localai.io>
This commit is contained in:
committed by
GitHub
parent
29a8713427
commit
31b5849d02
@@ -203,6 +203,7 @@ Connect to IRC networks:
|
|||||||
| `/chat/:name` | POST | Send message & get response | [Example](#send-message) |
|
| `/chat/:name` | POST | Send message & get response | [Example](#send-message) |
|
||||||
| `/notify/:name` | GET | Send notification to agent | [Example](#notify-agent) |
|
| `/notify/:name` | GET | Send notification to agent | [Example](#notify-agent) |
|
||||||
| `/sse/:name` | GET | Real-time agent event stream | [Example](#agent-sse-stream) |
|
| `/sse/:name` | GET | Real-time agent event stream | [Example](#agent-sse-stream) |
|
||||||
|
| `/v1/responses` | POST | Send message & get response | [OpenAI's Responses](https://platform.openai.com/docs/api-reference/responses/create) |
|
||||||
|
|
||||||
<details>
|
<details>
|
||||||
<summary><strong>Curl Examples</strong></summary>
|
<summary><strong>Curl Examples</strong></summary>
|
||||||
|
|||||||
56
webui/app.go
56
webui/app.go
@@ -6,8 +6,10 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"os"
|
"os"
|
||||||
"strings"
|
"strings"
|
||||||
|
"time"
|
||||||
|
|
||||||
"github.com/mudler/LocalAgent/pkg/xlog"
|
"github.com/mudler/LocalAgent/pkg/xlog"
|
||||||
|
"github.com/mudler/LocalAgent/webui/types"
|
||||||
|
|
||||||
"github.com/mudler/LocalAgent/core/agent"
|
"github.com/mudler/LocalAgent/core/agent"
|
||||||
"github.com/mudler/LocalAgent/core/sse"
|
"github.com/mudler/LocalAgent/core/sse"
|
||||||
@@ -294,3 +296,57 @@ func (a *App) Chat(pool *state.AgentPool) func(c *fiber.Ctx) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (a *App) Responses(pool *state.AgentPool) func(c *fiber.Ctx) error {
|
||||||
|
return func(c *fiber.Ctx) error {
|
||||||
|
var request types.RequestBody
|
||||||
|
if err := c.BodyParser(&request); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
request.SetInputByType()
|
||||||
|
|
||||||
|
agentName := request.Model
|
||||||
|
|
||||||
|
messages := request.ToChatCompletionMessages()
|
||||||
|
|
||||||
|
a := pool.GetAgent(agentName)
|
||||||
|
if a == nil {
|
||||||
|
xlog.Info("Agent not found in pool", c.Params("name"))
|
||||||
|
return c.Status(http.StatusInternalServerError).JSON(types.ResponseBody{Error: "Agent not found"})
|
||||||
|
}
|
||||||
|
|
||||||
|
res := a.Ask(
|
||||||
|
agent.WithConversationHistory(messages),
|
||||||
|
)
|
||||||
|
if res.Error != nil {
|
||||||
|
xlog.Error("Error asking agent", "agent", agentName, "error", res.Error)
|
||||||
|
|
||||||
|
return c.Status(http.StatusInternalServerError).JSON(types.ResponseBody{Error: res.Error.Error()})
|
||||||
|
} else {
|
||||||
|
xlog.Info("we got a response from the agent", "agent", agentName, "response", res.Response)
|
||||||
|
}
|
||||||
|
|
||||||
|
response := types.ResponseBody{
|
||||||
|
Object: "response",
|
||||||
|
// "created_at": 1741476542,
|
||||||
|
CreatedAt: time.Now().Unix(),
|
||||||
|
Status: "completed",
|
||||||
|
Output: []types.ResponseMessage{
|
||||||
|
{
|
||||||
|
Type: "message",
|
||||||
|
Status: "completed",
|
||||||
|
Role: "assistant",
|
||||||
|
Content: []types.MessageContentItem{
|
||||||
|
types.MessageContentItem{
|
||||||
|
Type: "output_text",
|
||||||
|
Text: res.Response,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
return c.JSON(response)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -104,6 +104,8 @@ func (app *App) registerRoutes(pool *state.AgentPool, webapp *fiber.App) {
|
|||||||
webapp.Put("/pause/:name", app.Pause(pool))
|
webapp.Put("/pause/:name", app.Pause(pool))
|
||||||
webapp.Put("/start/:name", app.Start(pool))
|
webapp.Put("/start/:name", app.Start(pool))
|
||||||
|
|
||||||
|
webapp.Post("/v1/responses", app.Responses(pool))
|
||||||
|
|
||||||
webapp.Get("/talk/:name", func(c *fiber.Ctx) error {
|
webapp.Get("/talk/:name", func(c *fiber.Ctx) error {
|
||||||
return c.Render("views/chat", fiber.Map{
|
return c.Render("views/chat", fiber.Map{
|
||||||
// "Character": agent.Character,
|
// "Character": agent.Character,
|
||||||
|
|||||||
174
webui/types/openai.go
Normal file
174
webui/types/openai.go
Normal file
@@ -0,0 +1,174 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
	"encoding/json"

	"github.com/sashabaranov/go-openai"
)
|
||||||
|
|
||||||
|
// RequestBody represents the request body structure for the OpenAI API
// (Responses endpoint, POST /v1/responses). Field names and JSON tags
// mirror the upstream API; unrecognized options are accepted but may be
// ignored by the handler.
type RequestBody struct {
	// Model names the target model; this server uses it as the agent name.
	Model string `json:"model"`
	// Input is polymorphic per the Responses API: either a plain string
	// or an array of message objects. SetInputByType splits it into the
	// two derived fields below.
	Input interface{} `json:"input"`
	// InputText holds Input when it was a plain string (derived by
	// SetInputByType, not expected directly from clients).
	InputText string `json:"input_text"`
	// InputMessages holds Input when it was a message array (derived by
	// SetInputByType).
	// NOTE(review): input_text/input_messages are also unmarshalable from
	// the wire because they carry JSON tags — presumably unintended;
	// confirm before changing the tags, as that alters the wire format.
	InputMessages      []InputMessage    `json:"input_messages"`
	Include            []string          `json:"include,omitempty"`
	Instructions       *string           `json:"instructions,omitempty"`
	MaxOutputTokens    *int              `json:"max_output_tokens,omitempty"`
	Metadata           map[string]string `json:"metadata,omitempty"`
	ParallelToolCalls  *bool             `json:"parallel_tool_calls,omitempty"`
	PreviousResponseID *string           `json:"previous_response_id,omitempty"`
	Reasoning          *ReasoningConfig  `json:"reasoning,omitempty"`
	Store              *bool             `json:"store,omitempty"`
	Stream             *bool             `json:"stream,omitempty"`
	Temperature        *float64          `json:"temperature,omitempty"`
	Text               *TextConfig       `json:"text,omitempty"`
	ToolChoice         interface{}       `json:"tool_choice,omitempty"`
	Tools              []interface{}     `json:"tools,omitempty"`
	TopP               *float64          `json:"top_p,omitempty"`
	Truncation         *string           `json:"truncation,omitempty"`
}
|
||||||
|
|
||||||
|
func (r *RequestBody) SetInputByType() {
|
||||||
|
switch input := r.Input.(type) {
|
||||||
|
case string:
|
||||||
|
r.InputText = input
|
||||||
|
case []any:
|
||||||
|
for _, i := range input {
|
||||||
|
switch i := i.(type) {
|
||||||
|
case InputMessage:
|
||||||
|
r.InputMessages = append(r.InputMessages, i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *RequestBody) ToChatCompletionMessages() []openai.ChatCompletionMessage {
|
||||||
|
result := []openai.ChatCompletionMessage{}
|
||||||
|
|
||||||
|
for _, m := range r.InputMessages {
|
||||||
|
content := []openai.ChatMessagePart{}
|
||||||
|
oneImageWasFound := false
|
||||||
|
for _, c := range m.Content {
|
||||||
|
switch c.Type {
|
||||||
|
case "text":
|
||||||
|
content = append(content, openai.ChatMessagePart{
|
||||||
|
Type: "text",
|
||||||
|
Text: c.Text,
|
||||||
|
})
|
||||||
|
case "image":
|
||||||
|
oneImageWasFound = true
|
||||||
|
content = append(content, openai.ChatMessagePart{
|
||||||
|
Type: "image",
|
||||||
|
ImageURL: &openai.ChatMessageImageURL{URL: c.ImageURL},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if oneImageWasFound {
|
||||||
|
result = append(result, openai.ChatCompletionMessage{
|
||||||
|
Role: m.Role,
|
||||||
|
MultiContent: content,
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
for _, c := range content {
|
||||||
|
result = append(result, openai.ChatCompletionMessage{
|
||||||
|
Role: m.Role,
|
||||||
|
Content: c.Text,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.InputText != "" {
|
||||||
|
result = append(result, openai.ChatCompletionMessage{
|
||||||
|
Role: "user",
|
||||||
|
Content: r.InputText,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReasoningConfig represents reasoning configuration options
// ("reasoning" object of the Responses API).
type ReasoningConfig struct {
	// Effort is the requested reasoning effort level (upstream accepts
	// values like "low"/"medium"/"high" — not validated here).
	Effort *string `json:"effort,omitempty"`
	// Summary controls reasoning-summary output.
	Summary *string `json:"summary,omitempty"`
}
|
||||||
|
|
||||||
|
// TextConfig represents text configuration options ("text" object of
// the Responses API).
type TextConfig struct {
	// Format selects the output format (e.g. plain text vs JSON).
	Format *FormatConfig `json:"format,omitempty"`
}
|
||||||
|
|
||||||
|
// FormatConfig represents format configuration options for text output.
type FormatConfig struct {
	// Type is the format identifier (e.g. "text").
	Type string `json:"type"`
}
|
||||||
|
|
||||||
|
// ResponseMessage represents a message in the response's "output" array.
type ResponseMessage struct {
	// Type is the output item type (this server emits "message").
	Type string `json:"type"`
	// ID is the message identifier (currently left empty by the handler).
	ID string `json:"id"`
	// Status reports item completion (this server emits "completed").
	Status string `json:"status"`
	// Role is the author role (this server emits "assistant").
	Role    string               `json:"role"`
	Content []MessageContentItem `json:"content"`
}
|
||||||
|
|
||||||
|
// MessageContentItem represents a content item in a message
// (this server emits items of type "output_text").
type MessageContentItem struct {
	Type string `json:"type"`
	Text string `json:"text"`
	// Annotations mirrors the upstream field; always empty here.
	Annotations []interface{} `json:"annotations"`
}
|
||||||
|
|
||||||
|
// UsageInfo represents token usage information for a response.
// NOTE(review): the handler never populates this — it serializes as
// zeroes; confirm whether real counts should be wired through.
type UsageInfo struct {
	InputTokens         int          `json:"input_tokens"`
	InputTokensDetails  TokenDetails `json:"input_tokens_details"`
	OutputTokens        int          `json:"output_tokens"`
	OutputTokensDetails TokenDetails `json:"output_tokens_details"`
	TotalTokens         int          `json:"total_tokens"`
}
|
||||||
|
|
||||||
|
// TokenDetails represents details about token usage within UsageInfo.
type TokenDetails struct {
	CachedTokens    int `json:"cached_tokens"`
	ReasoningTokens int `json:"reasoning_tokens,omitempty"`
}
|
||||||
|
|
||||||
|
// ResponseBody represents the structure of the OpenAI API response
// (Responses endpoint). The handler populates Object, CreatedAt, Status,
// and Output on success, or Error on failure; the remaining fields exist
// for wire compatibility and serialize as zero values.
type ResponseBody struct {
	ID     string `json:"id"`
	Object string `json:"object"`
	// CreatedAt is Unix seconds.
	CreatedAt int64  `json:"created_at"`
	Status    string `json:"status"`
	// Error carries an error description (the handler sets a string);
	// interface{} keeps it nullable on the wire.
	Error              interface{}            `json:"error"`
	IncompleteDetails  interface{}            `json:"incomplete_details"`
	Instructions       interface{}            `json:"instructions"`
	MaxOutputTokens    interface{}            `json:"max_output_tokens"`
	Model              string                 `json:"model"`
	Output             []ResponseMessage      `json:"output"`
	ParallelToolCalls  bool                   `json:"parallel_tool_calls"`
	PreviousResponseID interface{}            `json:"previous_response_id"`
	Reasoning          ReasoningConfig        `json:"reasoning"`
	Store              bool                   `json:"store"`
	Temperature        float64                `json:"temperature"`
	Text               TextConfig             `json:"text"`
	ToolChoice         string                 `json:"tool_choice"`
	Tools              []interface{}          `json:"tools"`
	TopP               float64                `json:"top_p"`
	Truncation         string                 `json:"truncation"`
	Usage              UsageInfo              `json:"usage"`
	User               interface{}            `json:"user"`
	Metadata           map[string]interface{} `json:"metadata"`
}
|
||||||
|
|
||||||
|
// InputMessage represents a user input message: one element of the
// request's "input" array after normalization (see SetInputByType).
type InputMessage struct {
	// Role is the author role (e.g. "user", "assistant").
	Role    string        `json:"role"`
	Content []ContentItem `json:"content"`
}
|
||||||
|
|
||||||
|
// ContentItem represents an item in a content array of an input message.
type ContentItem struct {
	// Type discriminates the item; ToChatCompletionMessages recognizes
	// text and image kinds.
	Type string `json:"type"`
	// Text is set for text items.
	Text string `json:"text,omitempty"`
	// ImageURL is set for image items (a URL or data URI).
	ImageURL string `json:"image_url,omitempty"`
}
|
||||||
Reference in New Issue
Block a user