Compare commits: feat/goals...feat/githu
13 Commits
| Author | SHA1 | Date |
|---|---|---|
| | c6117e1c22 | |
| | 2cee722dd1 | |
| | 97ef7acec0 | |
| | 77189b6114 | |
| | c32d315910 | |
| | 606ffd8275 | |
| | 601dba3fc4 | |
| | 00ab476a77 | |
| | 906079cbbb | |
| | 808d9c981c | |
| | 2b79c99dd7 | |
| | 77905ed3cd | |
| | 60c249f19a | |
.github/workflows/tests.yml (vendored, 2 changes)

@@ -3,7 +3,7 @@ name: Run Go Tests
 on:
   push:
     branches:
-      - '**'
+      - 'main'
   pull_request:
     branches:
       - '**'

Makefile (2 changes)

@@ -3,7 +3,7 @@ IMAGE_NAME?=webui
 ROOT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))

 prepare-tests:
-	docker compose up -d
+	docker compose up -d --build

 cleanup-tests:
 	docker compose down

README.md (127 changes)

@@ -45,14 +45,129 @@ LocalAGI ensures your data stays exactly where you want it—on your hardware. N
 git clone https://github.com/mudler/LocalAGI
 cd LocalAGI

-# CPU setup
-docker compose up -f docker-compose.yml
+# CPU setup (default)
+docker compose up

-# GPU setup
-docker compose up -f docker-compose.gpu.yml
+# NVIDIA GPU setup
+docker compose -f docker-compose.nvidia.yaml up
+
+# Intel GPU setup (for Intel Arc and integrated GPUs)
+docker compose -f docker-compose.intel.yaml up

+# Start with a specific model (see available models in models.localai.io, or localai.io to use any model in huggingface)
+MODEL_NAME=gemma-3-12b-it docker compose up
+
+# NVIDIA GPU setup with custom multimodal and image models
+MODEL_NAME=gemma-3-12b-it \
+MULTIMODAL_MODEL=minicpm-v-2_6 \
+IMAGE_MODEL=flux.1-dev \
+docker compose -f docker-compose.nvidia.yaml up
 ```

-Access your agents at `http://localhost:8080`
+Now you can access and manage your agents at [http://localhost:8080](http://localhost:8080)

+## 📚🆕 Local Stack Family
+
+🆕 LocalAI is now part of a comprehensive suite of AI tools designed to work together:
+
+<table>
+<tr>
+<td width="50%" valign="top">
+<a href="https://github.com/mudler/LocalAI">
+<img src="https://raw.githubusercontent.com/mudler/LocalAI/refs/heads/rebranding/core/http/static/logo_horizontal.png" width="300" alt="LocalAI Logo">
+</a>
+</td>
+<td width="50%" valign="top">
+<h3><a href="https://github.com/mudler/LocalRecall">LocalAI</a></h3>
+<p>LocalAI is the free, Open Source OpenAI alternative. LocalAI act as a drop-in replacement REST API that's compatible with OpenAI API specifications for local AI inferencing. Does not require GPU.</p>
+</td>
+</tr>
+<tr>
+<td width="50%" valign="top">
+<a href="https://github.com/mudler/LocalRecall">
+<img src="https://raw.githubusercontent.com/mudler/LocalRecall/refs/heads/main/static/localrecall_horizontal.png" width="300" alt="LocalRecall Logo">
+</a>
+</td>
+<td width="50%" valign="top">
+<h3><a href="https://github.com/mudler/LocalRecall">LocalRecall</a></h3>
+<p>A REST-ful API and knowledge base management system that provides persistent memory and storage capabilities for AI agents.</p>
+</td>
+</tr>
+</table>
+
+## 🖥️ Hardware Configurations
+
+LocalAGI supports multiple hardware configurations through Docker Compose profiles:
+
+### CPU (Default)
+- No special configuration needed
+- Runs on any system with Docker
+- Best for testing and development
+- Supports text models only
+
+### NVIDIA GPU
+- Requires NVIDIA GPU and drivers
+- Uses CUDA for acceleration
+- Best for high-performance inference
+- Supports text, multimodal, and image generation models
+- Run with: `docker compose -f docker-compose.nvidia.yaml up`
+- Default models:
+  - Text: `arcee-agent`
+  - Multimodal: `minicpm-v-2_6`
+  - Image: `flux.1-dev`
+- Environment variables:
+  - `MODEL_NAME`: Text model to use
+  - `MULTIMODAL_MODEL`: Multimodal model to use
+  - `IMAGE_MODEL`: Image generation model to use
+  - `LOCALAI_SINGLE_ACTIVE_BACKEND`: Set to `true` to enable single active backend mode
+
+### Intel GPU
+- Supports Intel Arc and integrated GPUs
+- Uses SYCL for acceleration
+- Best for Intel-based systems
+- Supports text, multimodal, and image generation models
+- Run with: `docker compose -f docker-compose.intel.yaml up`
+- Default models:
+  - Text: `arcee-agent`
+  - Multimodal: `minicpm-v-2_6`
+  - Image: `sd-1.5-ggml`
+- Environment variables:
+  - `MODEL_NAME`: Text model to use
+  - `MULTIMODAL_MODEL`: Multimodal model to use
+  - `IMAGE_MODEL`: Image generation model to use
+  - `LOCALAI_SINGLE_ACTIVE_BACKEND`: Set to `true` to enable single active backend mode
+
+## Customize models
+
+You can customize the models used by LocalAGI by setting environment variables when running docker-compose. For example:
+
+```bash
+# CPU with custom model
+MODEL_NAME=gemma-3-12b-it docker compose up
+
+# NVIDIA GPU with custom models
+MODEL_NAME=gemma-3-12b-it \
+MULTIMODAL_MODEL=minicpm-v-2_6 \
+IMAGE_MODEL=flux.1-dev \
+docker compose -f docker-compose.nvidia.yaml up
+
+# Intel GPU with custom models
+MODEL_NAME=gemma-3-12b-it \
+MULTIMODAL_MODEL=minicpm-v-2_6 \
+IMAGE_MODEL=sd-1.5-ggml \
+docker compose -f docker-compose.intel.yaml up
+```
+
+If no models are specified, it will use the defaults:
+- Text model: `arcee-agent`
+- Multimodal model: `minicpm-v-2_6`
+- Image model: `flux.1-dev` (NVIDIA) or `sd-1.5-ggml` (Intel)
+
+Good (relatively small) models that have been tested are:
+
+- `qwen_qwq-32b` (best in co-ordinating agents)
+- `gemma-3-12b-it`
+- `gemma-3-27b-it`
+
 ## 🏆 Why Choose LocalAGI?

@@ -98,6 +213,8 @@ Explore detailed documentation including:

 ### Environment Configuration

+LocalAGI supports environment configurations. Note that these environment variables needs to be specified in the localagi container in the docker-compose file to have effect.
+
 | Variable | What It Does |
 |----------|--------------|
 | `LOCALAGI_MODEL` | Your go-to model |

@@ -10,12 +10,11 @@ import (
 // NewGoal creates a new intention action
 // The inention action is special as it tries to identify
 // a tool to use and a reasoning over to use it
-func NewGoal(s ...string) *GoalAction {
-    return &GoalAction{tools: s}
+func NewGoal() *GoalAction {
+    return &GoalAction{}
 }

 type GoalAction struct {
-    tools []string
 }
 type GoalResponse struct {
     Goal string `json:"goal"`

@@ -41,7 +41,7 @@ func (a *PlanAction) Plannable() bool {
 func (a *PlanAction) Definition() types.ActionDefinition {
     return types.ActionDefinition{
         Name: PlanActionName,
-        Description: "Use this tool for solving complex tasks that involves calling more tools in sequence.",
+        Description: "Use it for situations that involves doing more actions in sequence.",
         Properties: map[string]jsonschema.Definition{
             "subtasks": {
                 Type: jsonschema.Array,

@@ -24,15 +24,27 @@ type decisionResult struct {
 func (a *Agent) decision(
     ctx context.Context,
     conversation []openai.ChatCompletionMessage,
-    tools []openai.Tool, toolchoice any, maxRetries int) (*decisionResult, error) {
+    tools []openai.Tool, toolchoice string, maxRetries int) (*decisionResult, error) {
+
+    var choice *openai.ToolChoice
+
+    if toolchoice != "" {
+        choice = &openai.ToolChoice{
+            Type:     openai.ToolTypeFunction,
+            Function: openai.ToolFunction{Name: toolchoice},
+        }
+    }

     var lastErr error
     for attempts := 0; attempts < maxRetries; attempts++ {
         decision := openai.ChatCompletionRequest{
             Model:    a.options.LLMAPI.Model,
             Messages: conversation,
             Tools:    tools,
-            ToolChoice: toolchoice,
         }
+
+        if choice != nil {
+            decision.ToolChoice = *choice
+        }

         resp, err := a.client.CreateChatCompletion(ctx, decision)

@@ -42,6 +54,9 @@ func (a *Agent) decision(
             continue
         }

+        jsonResp, _ := json.Marshal(resp)
+        xlog.Debug("Decision response", "response", string(jsonResp))
+
         if len(resp.Choices) != 1 {
             lastErr = fmt.Errorf("no choices: %d", len(resp.Choices))
             xlog.Warn("Attempt to make a decision failed", "attempt", attempts+1, "error", lastErr)

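Editor's note: the hunks above rework decision() so callers pass the forced tool as a plain string ("" meaning "no forced tool") instead of building an openai.ToolChoice themselves. A minimal, self-contained sketch of that pattern, assuming only the go-openai types named in the diff (the model string is a placeholder, not from the diff):

```go
package main

import (
	"fmt"

	openai "github.com/sashabaranov/go-openai"
)

// buildRequest mirrors the reworked decision(): ToolChoice is only set on the
// request when a non-empty tool name is supplied by the caller.
func buildRequest(msgs []openai.ChatCompletionMessage, tools []openai.Tool, toolchoice string) openai.ChatCompletionRequest {
	req := openai.ChatCompletionRequest{
		Model:    "placeholder-model", // placeholder value, not from the diff
		Messages: msgs,
		Tools:    tools,
	}
	if toolchoice != "" {
		req.ToolChoice = openai.ToolChoice{
			Type:     openai.ToolTypeFunction,
			Function: openai.ToolFunction{Name: toolchoice},
		}
	}
	return req
}

func main() {
	req := buildRequest(nil, nil, "plan")
	fmt.Printf("%+v\n", req.ToolChoice)
}
```
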
@@ -189,10 +204,7 @@ func (a *Agent) generateParameters(ctx context.Context, pickTemplate string, act
         result, attemptErr = a.decision(ctx,
             cc,
             a.availableActions().ToTools(),
-            openai.ToolChoice{
-                Type:     openai.ToolTypeFunction,
-                Function: openai.ToolFunction{Name: act.Definition().Name.String()},
-            },
+            act.Definition().Name.String(),
             maxAttempts,
         )
         if attemptErr == nil && result.actionParams != nil {

@@ -253,6 +265,7 @@ func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction

         params, err := a.generateParameters(ctx, pickTemplate, subTaskAction, conv, subTaskReasoning, maxRetries)
         if err != nil {
+            xlog.Error("error generating action's parameters", "error", err)
             return conv, fmt.Errorf("error generating action's parameters: %w", err)
         }

@@ -282,6 +295,7 @@ func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction

         result, err := a.runAction(ctx, subTaskAction, actionParams)
         if err != nil {
+            xlog.Error("error running action", "error", err)
             return conv, fmt.Errorf("error running action: %w", err)
         }

@@ -367,7 +381,9 @@ func (a *Agent) prepareHUD() (promptHUD *PromptHUD) {
 func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.ChatCompletionMessage, maxRetries int) (types.Action, types.ActionParams, string, error) {
     c := messages

-    xlog.Debug("[pickAction] picking action", "messages", messages)
+    xlog.Debug("[pickAction] picking action starts", "messages", messages)

+    // Identify the goal of this conversation
+
     if !a.options.forceReasoning {
         xlog.Debug("not forcing reasoning")

@@ -376,7 +392,7 @@ func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.
         thought, err := a.decision(ctx,
             messages,
             a.availableActions().ToTools(),
-            nil,
+            "",
             maxRetries)
         if err != nil {
             return nil, nil, "", err

@@ -415,120 +431,83 @@ func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.
         }, c...)
     }

-    actionsID := []string{}
+    thought, err := a.decision(ctx,
+        c,
+        types.Actions{action.NewReasoning()}.ToTools(),
+        action.NewReasoning().Definition().Name.String(), maxRetries)
+    if err != nil {
+        return nil, nil, "", err
+    }
+    originalReasoning := ""
+    response := &action.ReasoningResponse{}
+    if thought.actionParams != nil {
+        if err := thought.actionParams.Unmarshal(response); err != nil {
+            return nil, nil, "", err
+        }
+        originalReasoning = response.Reasoning
+    }
+    if thought.message != "" {
+        originalReasoning = thought.message
+    }
+
+    xlog.Debug("[pickAction] picking action", "messages", c)
+    // thought, err := a.askLLM(ctx,
+    // c,
+
+    actionsID := []string{"reply"}
     for _, m := range a.availableActions() {
         actionsID = append(actionsID, m.Definition().Name.String())
     }

-    // thoughtPromptStringBuilder := strings.Builder{}
-    // thoughtPromptStringBuilder.WriteString("You have to pick an action based on the conversation and the prompt. Describe the full reasoning process for your choice. Here is a list of actions: ")
-    // for _, m := range a.availableActions() {
-    //     thoughtPromptStringBuilder.WriteString(
-    //         m.Definition().Name.String() + ": " + m.Definition().Description + "\n",
-    //     )
-    // }
-
-    // thoughtPromptStringBuilder.WriteString("To not use any action, respond with 'none'")
-
-    //thoughtPromptStringBuilder.WriteString("\n\nConversation: " + Messages(c).RemoveIf(func(msg openai.ChatCompletionMessage) bool {
-    //    return msg.Role == "system"
-    //}).String())
-
-    //thoughtPrompt := thoughtPromptStringBuilder.String()
-
-    //thoughtConv := []openai.ChatCompletionMessage{}
-
-    thought, err := a.askLLM(ctx,
-        c,
-        maxRetries,
-    )
-    if err != nil {
-        return nil, nil, "", err
-    }
-    originalReasoning := thought.Content
-
-    // From the thought, get the action call
-    // Get all the available actions IDs
-
-    // by grammar, let's decide if we have achieved the goal
-    // 1. analyze response and check if goal is achieved
-
-    params, err := a.decision(ctx,
-        []openai.ChatCompletionMessage{
-            {
-                Role:    "system",
-                Content: "Extract an action to perform from the following reasoning: ",
-            },
-            {
-                Role:    "user",
-                Content: originalReasoning,
-            }},
-        types.Actions{action.NewGoal()}.ToTools(),
-        action.NewGoal().Definition().Name, maxRetries)
-    if err != nil {
-        return nil, nil, "", fmt.Errorf("failed to get the action tool parameters: %v", err)
-    }
-
-    goalResponse := action.GoalResponse{}
-    err = params.actionParams.Unmarshal(&goalResponse)
-    if err != nil {
-        return nil, nil, "", err
-    }
-
-    if goalResponse.Achieved {
-        xlog.Debug("[pickAction] goal achieved", "goal", goalResponse.Goal)
-        return nil, nil, "", nil
-    }
-
-    // if the goal is not achieved, pick an action
-    xlog.Debug("[pickAction] goal not achieved", "goal", goalResponse.Goal)
-
-    xlog.Debug("[pickAction] thought", "conv", c, "originalReasoning", originalReasoning)
-
+    xlog.Debug("[pickAction] actionsID", "actionsID", actionsID)
+
+    intentionsTools := action.NewIntention(actionsID...)
     // TODO: FORCE to select ana ction here
     // NOTE: we do not give the full conversation here to pick the action
     // to avoid hallucinations
-    params, err = a.decision(ctx,
-        []openai.ChatCompletionMessage{
-            {
-                Role:    "system",
-                Content: "Extract an action to perform from the following reasoning: ",
-            },
-            {
-                Role:    "user",
-                Content: originalReasoning,
-            }},
-        a.availableActions().ToTools(),
-        nil, maxRetries)
+    // Extract an action
+    params, err := a.decision(ctx,
+        append(c, openai.ChatCompletionMessage{
+            Role:    "system",
+            Content: "Pick the relevant action given the following reasoning: " + originalReasoning,
+        }),
+        types.Actions{intentionsTools}.ToTools(),
+        intentionsTools.Definition().Name.String(), maxRetries)
     if err != nil {
         return nil, nil, "", fmt.Errorf("failed to get the action tool parameters: %v", err)
     }

-    chosenAction := a.availableActions().Find(params.actioName)
-
-    // xlog.Debug("[pickAction] params", "params", params)
-
-    // if params.actionParams == nil {
-    //     return nil, nil, params.message, nil
-    // }
-
-    // xlog.Debug("[pickAction] actionChoice", "actionChoice", params.actionParams, "message", params.message)
-
-    // actionChoice := action.IntentResponse{}
-
-    // err = params.actionParams.Unmarshal(&actionChoice)
-    // if err != nil {
-    //     return nil, nil, "", err
-    // }
-
-    // if actionChoice.Tool == "" || actionChoice.Tool == "none" {
-    //     return nil, nil, "", nil
-    // }
-
-    // // Find the action
-    // chosenAction := a.availableActions().Find(actionChoice.Tool)
-    // if chosenAction == nil {
-    //     return nil, nil, "", fmt.Errorf("no action found for intent:" + actionChoice.Tool)
+    if params.actionParams == nil {
+        xlog.Debug("[pickAction] no action params found")
+        return nil, nil, params.message, nil
+    }
+
+    actionChoice := action.IntentResponse{}
+    err = params.actionParams.Unmarshal(&actionChoice)
+    if err != nil {
+        return nil, nil, "", err
+    }
+
+    if actionChoice.Tool == "" || actionChoice.Tool == "reply" {
+        xlog.Debug("[pickAction] no action found, replying")
+        return nil, nil, "", nil
+    }
+
+    chosenAction := a.availableActions().Find(actionChoice.Tool)
+
+    xlog.Debug("[pickAction] chosenAction", "chosenAction", chosenAction, "actionName", actionChoice.Tool)
+
+    // // Let's double check if the action is correct by asking the LLM to judge it
+    // if chosenAction!= nil {
+    //     promptString:= "Given the following goal and thoughts, is the action correct? \n\n"
+    //     promptString+= fmt.Sprintf("Goal: %s\n", goalResponse.Goal)
+    //     promptString+= fmt.Sprintf("Thoughts: %s\n", originalReasoning)
+    //     promptString+= fmt.Sprintf("Action: %s\n", chosenAction.Definition().Name.String())
+    //     promptString+= fmt.Sprintf("Action description: %s\n", chosenAction.Definition().Description)
+    //     promptString+= fmt.Sprintf("Action parameters: %s\n", params.actionParams)
     // }

     return chosenAction, nil, originalReasoning, nil

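Editor's note: the new pickAction flow above forces an "intention" tool call and then unmarshals the returned arguments into a typed response to resolve the chosen action. A small, self-contained sketch of that unmarshalling step, using a hypothetical stand-in for action.IntentResponse (only the Tool field and the ""/"reply" handling appear in the diff; the JSON tag and sample payload are assumptions):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// intentResponse is a stand-in for action.IntentResponse: the Tool field is
// the one used in the diff (actionChoice.Tool); the json tag is assumed.
type intentResponse struct {
	Tool string `json:"tool"`
}

func main() {
	// Raw arguments as they might come back from the forced intention tool call.
	raw := []byte(`{"tool": "search"}`)

	var choice intentResponse
	if err := json.Unmarshal(raw, &choice); err != nil {
		panic(err)
	}

	// Mirrors the new pickAction logic: an empty tool or an explicit "reply"
	// means no action is executed and the agent answers directly.
	if choice.Tool == "" || choice.Tool == "reply" {
		fmt.Println("no action: reply directly")
		return
	}
	fmt.Println("run action:", choice.Tool)
}
```
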
@@ -249,7 +249,7 @@ func (a *Agent) runAction(ctx context.Context, chosenAction types.Action, params
         }
     }

-    xlog.Info("Running action", "action", chosenAction.Definition().Name, "agent", a.Character.Name)
+    xlog.Info("[runAction] Running action", "action", chosenAction.Definition().Name, "agent", a.Character.Name, "params", params.String())

     if chosenAction.Definition().Name.Is(action.StateActionName) {
         // We need to store the result in the state

@@ -270,6 +270,8 @@ func (a *Agent) runAction(ctx context.Context, chosenAction types.Action, params
         }
     }

+    xlog.Debug("[runAction] Action result", "action", chosenAction.Definition().Name, "params", params.String(), "result", result.Result)
+
     return result, nil
 }

@@ -603,7 +605,13 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
         var err error
         conv, err = a.handlePlanning(job.GetContext(), job, chosenAction, actionParams, reasoning, pickTemplate, conv)
         if err != nil {
-            job.Result.Finish(fmt.Errorf("error running action: %w", err))
+            xlog.Error("error handling planning", "error", err)
+            //job.Result.Conversation = conv
+            //job.Result.SetResponse(msg.Content)
+            a.reply(job, role, append(conv, openai.ChatCompletionMessage{
+                Role:    "assistant",
+                Content: fmt.Sprintf("Error handling planning: %v", err),
+            }), actionParams, chosenAction, reasoning)
             return
         }

@@ -689,26 +697,6 @@ func (a *Agent) consumeJob(job *types.Job, role string) {
         job.SetNextAction(&followingAction, &followingParams, reasoning)
         a.consumeJob(job, role)
         return
-    } else if followingAction == nil {
-        xlog.Info("Not following another action", "agent", a.Character.Name)
-
-        if !a.options.forceReasoning {
-            xlog.Info("Finish conversation with reasoning", "reasoning", reasoning, "agent", a.Character.Name)
-
-            msg := openai.ChatCompletionMessage{
-                Role:    "assistant",
-                Content: reasoning,
-            }
-
-            conv = append(conv, msg)
-            job.Result.SetResponse(msg.Content)
-            job.Result.Conversation = conv
-            job.Result.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
-                a.saveCurrentConversation(conv)
-            })
-            job.Result.Finish(nil)
-            return
-        }
     }

     a.reply(job, role, conv, actionParams, chosenAction, reasoning)

@@ -126,6 +126,8 @@ var _ = Describe("Agent test", func() {
         agent, err := New(
             WithLLMAPIURL(apiURL),
             WithModel(testModel),
+            EnableForceReasoning,
+            WithTimeout("10m"),
             WithLoopDetectionSteps(3),
             // WithRandomIdentity(),
             WithActions(&TestAction{response: map[string]string{

@@ -174,7 +176,7 @@ var _ = Describe("Agent test", func() {
         agent, err := New(
             WithLLMAPIURL(apiURL),
             WithModel(testModel),
+            WithTimeout("10m"),
             // WithRandomIdentity(),
             WithActions(&TestAction{response: map[string]string{
                 "boston": testActionResult,

@@ -199,6 +201,7 @@ var _ = Describe("Agent test", func() {
         agent, err := New(
             WithLLMAPIURL(apiURL),
             WithModel(testModel),
+            WithTimeout("10m"),
             EnableHUD,
             // EnableStandaloneJob,
             // WithRandomIdentity(),

@@ -235,7 +238,7 @@ var _ = Describe("Agent test", func() {
         defer agent.Stop()

         result := agent.Ask(
-            types.WithText("plan a trip to San Francisco from Venice, Italy"),
+            types.WithText("Thoroughly plan a trip to San Francisco from Venice, Italy; check flight times, visa requirements and whether electrical items are allowed in cabin luggage."),
         )
         Expect(len(result.State)).To(BeNumerically(">", 1))

@@ -257,6 +260,7 @@ var _ = Describe("Agent test", func() {
             WithLLMAPIURL(apiURL),
             WithModel(testModel),
             WithLLMAPIKey(apiKeyURL),
+            WithTimeout("10m"),
             WithNewConversationSubscriber(func(m openai.ChatCompletionMessage) {
                 mu.Lock()
                 message = m

@@ -271,7 +275,7 @@ var _ = Describe("Agent test", func() {
             EnableStandaloneJob,
             EnableHUD,
             WithPeriodicRuns("1s"),
-            WithPermanentGoal("use the new_conversation tool"),
+            WithPermanentGoal("use the new_conversation tool to initiate a conversation with the user"),
             // EnableStandaloneJob,
             // WithRandomIdentity(),
         )

@@ -115,7 +115,7 @@ Available Tools:
 const reSelfEvalTemplate = pickSelfTemplate

 const pickActionTemplate = hudTemplate + `
-Your only task is to analyze the situation and determine a goal and the best tool to use, or just a final response if we have fullfilled the goal.
+Your only task is to analyze the conversation and determine a goal and the best tool to use, or just a final response if we have fullfilled the goal.

 Guidelines:
 1. Review the current state, what was done already and context

@@ -1,75 +0,0 @@
-services:
-  localai:
-    # See https://localai.io/basics/container/#standard-container-images for
-    # a list of available container images (or build your own with the provided Dockerfile)
-    # Available images with CUDA, ROCm, SYCL, Vulkan
-    # Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
-    # Image list (dockerhub): https://hub.docker.com/r/localai/localai
-    image: localai/localai:master-sycl-f32-ffmpeg-core
-    command:
-      # - rombo-org_rombo-llm-v3.0-qwen-32b # minimum suggested model
-      - arcee-agent # (smaller)
-      - granite-embedding-107m-multilingual
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
-      interval: 60s
-      timeout: 10m
-      retries: 120
-    ports:
-      - 8081:8080
-    environment:
-      - DEBUG=true
-      #- LOCALAI_API_KEY=sk-1234567890
-    volumes:
-      - ./volumes/models:/build/models:cached
-      - ./volumes/images:/tmp/generated/images
-    devices:
-      # On a system with integrated GPU and an Arc 770, this is the Arc 770
-      - /dev/dri/card1
-      - /dev/dri/renderD129
-
-  localrecall:
-    image: quay.io/mudler/localrecall:main
-    ports:
-      - 8080
-    environment:
-      - COLLECTION_DB_PATH=/db
-      - EMBEDDING_MODEL=granite-embedding-107m-multilingual
-      - FILE_ASSETS=/assets
-      - OPENAI_API_KEY=sk-1234567890
-      - OPENAI_BASE_URL=http://localai:8080
-    volumes:
-      - ./volumes/localrag/db:/db
-      - ./volumes/localrag/assets/:/assets
-
-  localrecall-healthcheck:
-    depends_on:
-      localrecall:
-        condition: service_started
-    image: busybox
-    command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]
-
-  localagi:
-    depends_on:
-      localai:
-        condition: service_healthy
-      localrecall-healthcheck:
-        condition: service_completed_successfully
-    build:
-      context: .
-      dockerfile: Dockerfile.webui
-    ports:
-      - 8080:3000
-    image: quay.io/mudler/localagi:master
-    environment:
-      - LOCALAGI_MODEL=arcee-agent
-      - LOCALAGI_LLM_API_URL=http://localai:8080
-      #- LOCALAGI_LLM_API_KEY=sk-1234567890
-      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
-      - LOCALAGI_STATE_DIR=/pool
-      - LOCALAGI_TIMEOUT=5m
-      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    volumes:
-      - ./volumes/localagi/:/pool

@@ -1,85 +0,0 @@
-services:
-  localai:
-    # See https://localai.io/basics/container/#standard-container-images for
-    # a list of available container images (or build your own with the provided Dockerfile)
-    # Available images with CUDA, ROCm, SYCL, Vulkan
-    # Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
-    # Image list (dockerhub): https://hub.docker.com/r/localai/localai
-    image: localai/localai:master-gpu-nvidia-cuda-12
-    command:
-      - mlabonne_gemma-3-27b-it-abliterated
-      - qwen_qwq-32b
-      # Other good alternative options:
-      # - rombo-org_rombo-llm-v3.0-qwen-32b # minimum suggested model
-      # - arcee-agent
-      - granite-embedding-107m-multilingual
-      - flux.1-dev
-      - minicpm-v-2_6
-    environment:
-      # Enable if you have a single GPU which don't fit all the models
-      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
-      - DEBUG=true
-    healthcheck:
-      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
-      interval: 10s
-      timeout: 20m
-      retries: 20
-    ports:
-      - 8081:8080
-    volumes:
-      - ./volumes/models:/build/models:cached
-      - ./volumes/images:/tmp/generated/images
-    deploy:
-      resources:
-        reservations:
-          devices:
-            - driver: nvidia
-              count: 1
-              capabilities: [gpu]
-  localrecall:
-    image: quay.io/mudler/localrecall:main
-    ports:
-      - 8080
-    environment:
-      - COLLECTION_DB_PATH=/db
-      - EMBEDDING_MODEL=granite-embedding-107m-multilingual
-      - FILE_ASSETS=/assets
-      - OPENAI_API_KEY=sk-1234567890
-      - OPENAI_BASE_URL=http://localai:8080
-    volumes:
-      - ./volumes/localrag/db:/db
-      - ./volumes/localrag/assets/:/assets
-
-  localrecall-healthcheck:
-    depends_on:
-      localrecall:
-        condition: service_started
-    image: busybox
-    command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]
-
-  localagi:
-    depends_on:
-      localai:
-        condition: service_healthy
-      localrecall-healthcheck:
-        condition: service_completed_successfully
-    build:
-      context: .
-      dockerfile: Dockerfile.webui
-    ports:
-      - 8080:3000
-    image: quay.io/mudler/localagi:master
-    environment:
-      - LOCALAGI_MODEL=qwen_qwq-32b
-      - LOCALAGI_LLM_API_URL=http://localai:8080
-      #- LOCALAGI_LLM_API_KEY=sk-1234567890
-      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
-      - LOCALAGI_STATE_DIR=/pool
-      - LOCALAGI_TIMEOUT=5m
-      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
-      - LOCALAGI_MULTIMODAL_MODEL=minicpm-v-2_6
-      - LOCALAGI_IMAGE_MODEL=flux.1-dev
-    extra_hosts:
-      - "host.docker.internal:host-gateway"
-    volumes:
-      - ./volumes/localagi/:/pool

docker-compose.intel.yaml (new file, 33 lines)

@@ -0,0 +1,33 @@
+services:
+  localai:
+    extends:
+      file: docker-compose.yaml
+      service: localai
+    environment:
+      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
+      - DEBUG=true
+    image: localai/localai:master-sycl-f32-ffmpeg-core
+    devices:
+      # On a system with integrated GPU and an Arc 770, this is the Arc 770
+      - /dev/dri/card1
+      - /dev/dri/renderD129
+    command:
+      - ${MODEL_NAME:-arcee-agent}
+      - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
+      - ${IMAGE_MODEL:-sd-1.5-ggml}
+      - granite-embedding-107m-multilingual
+
+  localrecall:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall
+
+  localrecall-healthcheck:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall-healthcheck
+
+  localagi:
+    extends:
+      file: docker-compose.yaml
+      service: localagi

docker-compose.nvidia.yaml (new file, 31 lines)

@@ -0,0 +1,31 @@
+services:
+  localai:
+    extends:
+      file: docker-compose.yaml
+      service: localai
+    environment:
+      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
+      - DEBUG=true
+    image: localai/localai:master-sycl-f32-ffmpeg-core
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - driver: nvidia
+              count: 1
+              capabilities: [gpu]
+
+  localrecall:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall
+
+  localrecall-healthcheck:
+    extends:
+      file: docker-compose.yaml
+      service: localrecall-healthcheck
+
+  localagi:
+    extends:
+      file: docker-compose.yaml
+      service: localagi

@@ -7,7 +7,9 @@ services:
     # Image list (dockerhub): https://hub.docker.com/r/localai/localai
     image: localai/localai:master-ffmpeg-core
     command:
-      - arcee-agent # (smaller)
+      - ${MODEL_NAME:-arcee-agent}
+      - ${MULTIMODAL_MODEL:-minicpm-v-2_6}
+      - ${IMAGE_MODEL:-flux.1-dev}
       - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]

@@ -23,14 +25,6 @@ services:
       - ./volumes/models:/build/models:cached
       - ./volumes/images:/tmp/generated/images

-    # decomment the following piece if running with Nvidia GPUs
-    # deploy:
-    #   resources:
-    #     reservations:
-    #       devices:
-    #         - driver: nvidia
-    #           count: 1
-    #           capabilities: [gpu]
   localrecall:
     image: quay.io/mudler/localrecall:main
     ports:

@@ -65,7 +59,9 @@ services:
       - 8080:3000
     #image: quay.io/mudler/localagi:master
     environment:
-      - LOCALAGI_MODEL=arcee-agent
+      - LOCALAGI_MODEL=${MODEL_NAME:-arcee-agent}
+      - LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
+      - LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
       - LOCALAGI_LLM_API_URL=http://localai:8080
       #- LOCALAGI_LLM_API_KEY=sk-1234567890
       - LOCALAGI_LOCALRAG_URL=http://localrecall:8080

@@ -75,4 +71,4 @@ services:
     extra_hosts:
       - "host.docker.internal:host-gateway"
     volumes:
       - ./volumes/localagi/:/pool

@@ -29,6 +29,8 @@ const (
     ActionGithubPRReader = "github-pr-reader"
     ActionGithubPRCommenter = "github-pr-commenter"
     ActionGithubPRReviewer = "github-pr-reviewer"
+    ActionGithubPRCreator = "github-pr-creator"
+    ActionGithubGetAllContent = "github-get-all-repository-content"
     ActionGithubREADME = "github-readme"
     ActionScraper = "scraper"
     ActionWikipedia = "wikipedia"

@@ -49,12 +51,14 @@ var AvailableActions = []string{
     ActionGithubIssueCloser,
     ActionGithubIssueSearcher,
     ActionGithubRepositoryGet,
+    ActionGithubGetAllContent,
     ActionGithubRepositoryCreateOrUpdate,
     ActionGithubIssueReader,
     ActionGithubIssueCommenter,
     ActionGithubPRReader,
     ActionGithubPRCommenter,
     ActionGithubPRReviewer,
+    ActionGithubPRCreator,
     ActionGithubREADME,
     ActionScraper,
     ActionBrowse,

@@ -118,6 +122,10 @@ func Action(name, agentName string, config map[string]string, pool *state.AgentP
         a = actions.NewGithubPRCommenter(config)
     case ActionGithubPRReviewer:
         a = actions.NewGithubPRReviewer(config)
+    case ActionGithubPRCreator:
+        a = actions.NewGithubPRCreator(config)
+    case ActionGithubGetAllContent:
+        a = actions.NewGithubRepositoryGetAllContent(config)
     case ActionGithubIssueCommenter:
         a = actions.NewGithubIssueCommenter(config)
     case ActionGithubRepositoryGet:

@@ -201,6 +209,11 @@ func ActionsConfigMeta() []config.FieldGroup {
             Label: "GitHub Repository Get Content",
             Fields: actions.GithubRepositoryGetContentConfigMeta(),
         },
+        {
+            Name: "github-get-all-repository-content",
+            Label: "GitHub Get All Repository Content",
+            Fields: actions.GithubRepositoryGetAllContentConfigMeta(),
+        },
         {
             Name: "github-repository-create-or-update-content",
             Label: "GitHub Repository Create/Update Content",

@@ -226,6 +239,11 @@ func ActionsConfigMeta() []config.FieldGroup {
             Label: "GitHub PR Reviewer",
             Fields: actions.GithubPRReviewerConfigMeta(),
         },
+        {
+            Name: "github-pr-creator",
+            Label: "GitHub PR Creator",
+            Fields: actions.GithubPRCreatorConfigMeta(),
+        },
         {
             Name: "twitter-post",
             Label: "Twitter Post",

services/actions/githubprcreator.go (new file, 315 lines)

@@ -0,0 +1,315 @@
+package actions
+
+import (
+    "context"
+    "fmt"
+
+    "github.com/google/go-github/v69/github"
+    "github.com/mudler/LocalAGI/core/types"
+    "github.com/mudler/LocalAGI/pkg/config"
+    "github.com/sashabaranov/go-openai/jsonschema"
+)
+
+type GithubPRCreator struct {
+    token, repository, owner, customActionName, defaultBranch string
+    client *github.Client
+}
+
+func NewGithubPRCreator(config map[string]string) *GithubPRCreator {
+    client := github.NewClient(nil).WithAuthToken(config["token"])
+
+    return &GithubPRCreator{
+        client:           client,
+        token:            config["token"],
+        repository:       config["repository"],
+        owner:            config["owner"],
+        customActionName: config["customActionName"],
+        defaultBranch:    config["defaultBranch"],
+    }
+}
+
+func (g *GithubPRCreator) createOrUpdateBranch(ctx context.Context, branchName string) error {
+    // Get the latest commit SHA from the default branch
+    ref, _, err := g.client.Git.GetRef(ctx, g.owner, g.repository, "refs/heads/"+g.defaultBranch)
+    if err != nil {
+        return fmt.Errorf("failed to get reference: %w", err)
+    }
+
+    // Try to get the branch if it exists
+    _, resp, err := g.client.Git.GetRef(ctx, g.owner, g.repository, "refs/heads/"+branchName)
+    if err != nil {
+        // If branch doesn't exist, create it
+        if resp != nil && resp.StatusCode == 404 {
+            newRef := &github.Reference{
+                Ref:    github.String("refs/heads/" + branchName),
+                Object: &github.GitObject{SHA: ref.Object.SHA},
+            }
+            _, _, err = g.client.Git.CreateRef(ctx, g.owner, g.repository, newRef)
+            if err != nil {
+                return fmt.Errorf("failed to create branch: %w", err)
+            }
+            return nil
+        }
+        return fmt.Errorf("failed to check branch existence: %w", err)
+    }
+
+    // Branch exists, update it to the latest commit
+    updateRef := &github.Reference{
+        Ref:    github.String("refs/heads/" + branchName),
+        Object: &github.GitObject{SHA: ref.Object.SHA},
+    }
+    _, _, err = g.client.Git.UpdateRef(ctx, g.owner, g.repository, updateRef, true)
+    if err != nil {
+        return fmt.Errorf("failed to update branch: %w", err)
+    }
+
+    return nil
+}
+
+func (g *GithubPRCreator) createOrUpdateFile(ctx context.Context, branch string, filePath string, content string, message string) error {
+    // Get the current file content if it exists
+    var sha *string
+    fileContent, _, _, err := g.client.Repositories.GetContents(ctx, g.owner, g.repository, filePath, &github.RepositoryContentGetOptions{
+        Ref: branch,
+    })
+    if err == nil && fileContent != nil {
+        sha = fileContent.SHA
+    }
+
+    // Create or update the file
+    _, _, err = g.client.Repositories.CreateFile(ctx, g.owner, g.repository, filePath, &github.RepositoryContentFileOptions{
+        Message: &message,
+        Content: []byte(content),
+        Branch:  &branch,
+        SHA:     sha,
+    })
+    if err != nil {
+        return fmt.Errorf("failed to create/update file: %w", err)
+    }
+
+    return nil
+}
+
+func (g *GithubPRCreator) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
+    result := struct {
+        Repository string `json:"repository"`
+        Owner      string `json:"owner"`
+        Branch     string `json:"branch"`
+        Title      string `json:"title"`
+        Body       string `json:"body"`
+        BaseBranch string `json:"base_branch"`
+        Files      []struct {
+            Path    string `json:"path"`
+            Content string `json:"content"`
+        } `json:"files"`
+    }{}
+    err := params.Unmarshal(&result)
+    if err != nil {
+        return types.ActionResult{}, fmt.Errorf("failed to unmarshal params: %w", err)
+    }
+
+    if g.repository != "" && g.owner != "" {
+        result.Repository = g.repository
+        result.Owner = g.owner
+    }
+
+    if result.BaseBranch == "" {
+        result.BaseBranch = g.defaultBranch
+    }
+
+    // Create or update branch
+    err = g.createOrUpdateBranch(ctx, result.Branch)
+    if err != nil {
+        return types.ActionResult{}, fmt.Errorf("failed to create/update branch: %w", err)
+    }
+
+    // Create or update files
+    for _, file := range result.Files {
+        err = g.createOrUpdateFile(ctx, result.Branch, file.Path, file.Content, fmt.Sprintf("Update %s", file.Path))
+        if err != nil {
+            return types.ActionResult{}, fmt.Errorf("failed to update file %s: %w", file.Path, err)
+        }
+    }
+
+    // Check if PR already exists for this branch
+    prs, _, err := g.client.PullRequests.List(ctx, result.Owner, result.Repository, &github.PullRequestListOptions{
+        State: "open",
+        Head:  fmt.Sprintf("%s:%s", result.Owner, result.Branch),
+    })
+    if err != nil {
+        return types.ActionResult{}, fmt.Errorf("failed to list pull requests: %w", err)
+    }
+
+    if len(prs) > 0 {
+        // Update existing PR
+        pr := prs[0]
+        update := &github.PullRequest{
+            Title: &result.Title,
+            Body:  &result.Body,
+        }
+        updatedPR, _, err := g.client.PullRequests.Edit(ctx, result.Owner, result.Repository, pr.GetNumber(), update)
+        if err != nil {
+            return types.ActionResult{}, fmt.Errorf("failed to update pull request: %w", err)
+        }
+        return types.ActionResult{
+            Result: fmt.Sprintf("Updated pull request #%d: %s", updatedPR.GetNumber(), updatedPR.GetHTMLURL()),
+        }, nil
+    }
+
+    // Create new pull request
+    newPR := &github.NewPullRequest{
+        Title: &result.Title,
+        Body:  &result.Body,
+        Head:  &result.Branch,
+        Base:  &result.BaseBranch,
+    }
+
+    createdPR, _, err := g.client.PullRequests.Create(ctx, result.Owner, result.Repository, newPR)
+    if err != nil {
+        return types.ActionResult{}, fmt.Errorf("failed to create pull request: %w", err)
+    }
+
+    return types.ActionResult{
+        Result: fmt.Sprintf("Created pull request #%d: %s", createdPR.GetNumber(), createdPR.GetHTMLURL()),
+    }, nil
+}
+
+func (g *GithubPRCreator) Definition() types.ActionDefinition {
+    actionName := "create_github_pr"
+    if g.customActionName != "" {
+        actionName = g.customActionName
+    }
+    description := "Create a GitHub pull request with file changes"
+    if g.repository != "" && g.owner != "" && g.defaultBranch != "" {
+        return types.ActionDefinition{
+            Name:        types.ActionDefinitionName(actionName),
+            Description: description,
+            Properties: map[string]jsonschema.Definition{
+                "branch": {
+                    Type:        jsonschema.String,
+                    Description: "The name of the new branch to create",
+                },
+                "title": {
+                    Type:        jsonschema.String,
+                    Description: "The title of the pull request",
+                },
+                "body": {
+                    Type:        jsonschema.String,
+                    Description: "The body/description of the pull request",
+                },
+                "files": {
+                    Type: jsonschema.Array,
+                    Items: &jsonschema.Definition{
+                        Type: jsonschema.Object,
+                        Properties: map[string]jsonschema.Definition{
+                            "path": {
+                                Type:        jsonschema.String,
+                                Description: "The path of the file to create/update",
+                            },
+                            "content": {
+                                Type:        jsonschema.String,
+                                Description: "The content of the file",
+                            },
+                        },
+                        Required: []string{"path", "content"},
+                    },
+                    Description: "Array of files to create or update",
+                },
+            },
+            Required: []string{"branch", "title", "files"},
+        }
+    }
+    return types.ActionDefinition{
+        Name:        types.ActionDefinitionName(actionName),
+        Description: description,
+        Properties: map[string]jsonschema.Definition{
+            "branch": {
+                Type:        jsonschema.String,
+                Description: "The name of the new branch to create",
+            },
+            "title": {
+                Type:        jsonschema.String,
+                Description: "The title of the pull request",
+            },
+            "body": {
+                Type:        jsonschema.String,
+                Description: "The body/description of the pull request",
+            },
+            "base_branch": {
+                Type:        jsonschema.String,
+                Description: "The base branch to merge into (defaults to configured default branch)",
+            },
+            "files": {
+                Type: jsonschema.Array,
+                Items: &jsonschema.Definition{
+                    Type: jsonschema.Object,
+                    Properties: map[string]jsonschema.Definition{
+                        "path": {
+                            Type:        jsonschema.String,
+                            Description: "The path of the file to create/update",
+                        },
+                        "content": {
+                            Type:        jsonschema.String,
+                            Description: "The content of the file",
+                        },
+                    },
+                    Required: []string{"path", "content"},
+                },
+                Description: "Array of files to create or update",
+            },
+            "repository": {
+                Type:        jsonschema.String,
+                Description: "The repository to create the pull request in",
+            },
+            "owner": {
+                Type:        jsonschema.String,
+                Description: "The owner of the repository",
+            },
+        },
+        Required: []string{"branch", "title", "files", "repository", "owner"},
+    }
+}
+
+func (a *GithubPRCreator) Plannable() bool {
+    return true
+}
+
+// GithubPRCreatorConfigMeta returns the metadata for GitHub PR Creator action configuration fields
+func GithubPRCreatorConfigMeta() []config.Field {
+    return []config.Field{
+        {
+            Name:     "token",
+            Label:    "GitHub Token",
+            Type:     config.FieldTypeText,
+            Required: true,
+            HelpText: "GitHub API token with repository access",
+        },
+        {
+            Name:     "repository",
+            Label:    "Repository",
+            Type:     config.FieldTypeText,
+            Required: false,
+            HelpText: "GitHub repository name",
+        },
+        {
+            Name:     "owner",
+            Label:    "Owner",
+            Type:     config.FieldTypeText,
+            Required: false,
+            HelpText: "GitHub repository owner",
+        },
+        {
+            Name:     "customActionName",
+            Label:    "Custom Action Name",
+            Type:     config.FieldTypeText,
+            HelpText: "Custom name for this action",
+        },
+        {
+            Name:     "defaultBranch",
+            Label:    "Default Branch",
+            Type:     config.FieldTypeText,
+            Required: false,
+            HelpText: "Default branch to create PRs against (defaults to main)",
+        },
+    }
+}

services/actions/githubprcreator_test.go (new file, 118 lines)

@@ -0,0 +1,118 @@
+package actions_test
+
+import (
+    "context"
+    "os"
+
+    "github.com/mudler/LocalAGI/services/actions"
+    . "github.com/onsi/ginkgo/v2"
+    . "github.com/onsi/gomega"
+)
+
+var _ = Describe("GithubPRCreator", func() {
+    var (
+        action *actions.GithubPRCreator
+        ctx    context.Context
+    )
+
+    BeforeEach(func() {
+        ctx = context.Background()
+
+        // Check for required environment variables
+        token := os.Getenv("GITHUB_TOKEN")
+        repo := os.Getenv("TEST_REPOSITORY")
+        owner := os.Getenv("TEST_OWNER")
+
+        // Skip tests if any required environment variable is missing
+        if token == "" || repo == "" || owner == "" {
+            Skip("Skipping GitHub PR creator tests: required environment variables not set")
+        }
+
+        config := map[string]string{
+            "token":            token,
+            "repository":       repo,
+            "owner":            owner,
+            "customActionName": "test_create_pr",
+            "defaultBranch":    "main",
+        }
+
+        action = actions.NewGithubPRCreator(config)
+    })
+
+    Describe("Creating pull requests", func() {
+        It("should successfully create a pull request with file changes", func() {
+            params := map[string]interface{}{
+                "branch":      "test-branch",
+                "title":       "Test PR",
+                "body":        "This is a test pull request",
+                "base_branch": "main",
+                "files": []map[string]interface{}{
+                    {
+                        "path":    "test.txt",
+                        "content": "This is a test file",
+                    },
+                },
+            }
+
+            result, err := action.Run(ctx, params)
+            Expect(err).NotTo(HaveOccurred())
+            Expect(result.Result).To(ContainSubstring("pull request #"))
+        })
+
+        It("should handle missing required fields", func() {
+            params := map[string]interface{}{
+                "title": "Test PR",
+                "body":  "This is a test pull request",
+            }
+
+            _, err := action.Run(ctx, params)
+            Expect(err).To(HaveOccurred())
+        })
+    })
+
+    Describe("Action Definition", func() {
+        It("should return correct action definition", func() {
+            def := action.Definition()
+            Expect(def.Name.String()).To(Equal("test_create_pr"))
+            Expect(def.Description).To(ContainSubstring("Create a GitHub pull request with file changes"))
+            Expect(def.Properties).To(HaveKey("branch"))
+            Expect(def.Properties).To(HaveKey("title"))
+            Expect(def.Properties).To(HaveKey("files"))
+        })
+
+        It("should handle custom action name", func() {
+            config := map[string]string{
+                "token":            "test-token",
+                "customActionName": "custom_action_name",
+            }
+            action := actions.NewGithubPRCreator(config)
+            def := action.Definition()
+            Expect(def.Name.String()).To(Equal("custom_action_name"))
+        })
+    })
+
+    Describe("Configuration", func() {
+        It("should handle missing repository and owner in config", func() {
+            config := map[string]string{
+                "token": "test-token",
+            }
+            action := actions.NewGithubPRCreator(config)
+            def := action.Definition()
+            Expect(def.Properties).To(HaveKey("repository"))
+            Expect(def.Properties).To(HaveKey("owner"))
+        })
+
+        It("should handle provided repository and owner in config", func() {
+            config := map[string]string{
+                "token":         "test-token",
+                "repository":    "test-repo",
+                "defaultBranch": "main",
+                "owner":         "test-owner",
+            }
+            action := actions.NewGithubPRCreator(config)
+            def := action.Definition()
+            Expect(def.Properties).NotTo(HaveKey("repository"))
+            Expect(def.Properties).NotTo(HaveKey("owner"))
+        })
+    })
+})

@@ -128,11 +128,13 @@ func (g *GithubPRReviewer) Run(ctx context.Context, params types.ActionParams) (
 	}
 
 	actionResult := fmt.Sprintf(
-		"Pull request https://github.com/%s/%s/pull/%d reviewed successfully with status: %s",
+		"Pull request https://github.com/%s/%s/pull/%d reviewed successfully with status: %s, comments: %v, message: %s",
 		result.Owner,
 		result.Repository,
 		result.PRNumber,
 		strings.ToLower(result.ReviewAction),
+		result.Comments,
+		result.ReviewComment,
 	)
 
 	return types.ActionResult{Result: actionResult}, nil
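To illustrate the change above, the sketch below prints the enriched result string with made-up values; the owner, repository, PR number, and comments are placeholders and nothing here is taken from a real run.

package main

import "fmt"

func main() {
	// Hypothetical values, for illustration only.
	owner, repo, prNumber := "example-owner", "example-repo", 42
	reviewAction := "comment"
	comments := []string{"nit: typo in README"}
	reviewComment := "Looks good overall"

	// Same format string as the updated reviewer code: it now also surfaces
	// the line comments and the overall review message in the result.
	actionResult := fmt.Sprintf(
		"Pull request https://github.com/%s/%s/pull/%d reviewed successfully with status: %s, comments: %v, message: %s",
		owner, repo, prNumber, reviewAction, comments, reviewComment,
	)
	fmt.Println(actionResult)
}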
103 services/actions/githubprreviewer_test.go (new file)
@@ -0,0 +1,103 @@
package actions_test

import (
	"context"
	"os"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/services/actions"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("GithubPRReviewer", func() {
	var (
		reviewer *actions.GithubPRReviewer
		ctx      context.Context
	)

	BeforeEach(func() {
		ctx = context.Background()

		// Check for required environment variables
		token := os.Getenv("GITHUB_TOKEN")
		repo := os.Getenv("TEST_REPOSITORY")
		owner := os.Getenv("TEST_OWNER")
		prNumber := os.Getenv("TEST_PR_NUMBER")

		// Skip tests if any required environment variable is missing
		if token == "" || repo == "" || owner == "" || prNumber == "" {
			Skip("Skipping GitHub PR reviewer tests: required environment variables not set")
		}

		config := map[string]string{
			"token":            token,
			"repository":       repo,
			"owner":            owner,
			"customActionName": "test_review_github_pr",
		}

		reviewer = actions.NewGithubPRReviewer(config)
	})

	Describe("Reviewing a PR", func() {
		It("should successfully submit a review with comments", func() {
			prNumber := os.Getenv("TEST_PR_NUMBER")
			Expect(prNumber).NotTo(BeEmpty())

			params := types.ActionParams{
				"pr_number":      prNumber,
				"review_comment": "Test review comment from integration test",
				"review_action":  "COMMENT",
				"comments": []map[string]interface{}{
					{
						"file":    "README.md",
						"line":    1,
						"comment": "Test line comment from integration test",
					},
				},
			}

			result, err := reviewer.Run(ctx, params)
			Expect(err).NotTo(HaveOccurred())
			Expect(result.Result).To(ContainSubstring("reviewed successfully"))
		})

		It("should handle invalid PR number", func() {
			params := types.ActionParams{
				"pr_number":      999999,
				"review_comment": "Test review comment",
				"review_action":  "COMMENT",
			}

			result, err := reviewer.Run(ctx, params)
			Expect(err).To(HaveOccurred())
			Expect(result.Result).To(ContainSubstring("not found"))
		})

		It("should handle invalid review action", func() {
			prNumber := os.Getenv("TEST_PR_NUMBER")
			Expect(prNumber).NotTo(BeEmpty())

			params := types.ActionParams{
				"pr_number":      prNumber,
				"review_comment": "Test review comment",
				"review_action":  "INVALID_ACTION",
			}

			_, err := reviewer.Run(ctx, params)
			Expect(err).To(HaveOccurred())
		})
	})

	Describe("Action Definition", func() {
		It("should return correct action definition", func() {
			def := reviewer.Definition()
			Expect(def.Name).To(Equal(types.ActionDefinitionName("test_review_github_pr")))
			Expect(def.Description).To(ContainSubstring("Review a GitHub pull request"))
			Expect(def.Properties).To(HaveKey("pr_number"))
			Expect(def.Properties).To(HaveKey("review_action"))
			Expect(def.Properties).To(HaveKey("comments"))
		})
	})
})
174 services/actions/githubrepositorygetallcontent.go (new file)
@@ -0,0 +1,174 @@
package actions

import (
	"context"
	"fmt"
	"strings"

	"github.com/google/go-github/v69/github"
	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/pkg/config"
	"github.com/sashabaranov/go-openai/jsonschema"
)

type GithubRepositoryGetAllContent struct {
	token, repository, owner, customActionName string
	client                                     *github.Client
}

func NewGithubRepositoryGetAllContent(config map[string]string) *GithubRepositoryGetAllContent {
	client := github.NewClient(nil).WithAuthToken(config["token"])

	return &GithubRepositoryGetAllContent{
		client:           client,
		token:            config["token"],
		repository:       config["repository"],
		owner:            config["owner"],
		customActionName: config["customActionName"],
	}
}

func (g *GithubRepositoryGetAllContent) getContentRecursively(ctx context.Context, path string) (string, error) {
	var result strings.Builder

	// Get content at the current path
	_, directoryContent, _, err := g.client.Repositories.GetContents(ctx, g.owner, g.repository, path, nil)
	if err != nil {
		return "", fmt.Errorf("error getting content at path %s: %w", path, err)
	}

	// Process each item in the directory
	for _, item := range directoryContent {
		if item.GetType() == "dir" {
			// Recursively get content for subdirectories
			subContent, err := g.getContentRecursively(ctx, item.GetPath())
			if err != nil {
				return "", err
			}
			result.WriteString(subContent)
		} else if item.GetType() == "file" {
			// Get file content
			fileContent, _, _, err := g.client.Repositories.GetContents(ctx, g.owner, g.repository, item.GetPath(), nil)
			if err != nil {
				return "", fmt.Errorf("error getting file content for %s: %w", item.GetPath(), err)
			}

			content, err := fileContent.GetContent()
			if err != nil {
				return "", fmt.Errorf("error decoding content for %s: %w", item.GetPath(), err)
			}

			// Add file content to result with clear markers
			result.WriteString(fmt.Sprintf("\n--- START FILE: %s ---\n", item.GetPath()))
			result.WriteString(content)
			result.WriteString(fmt.Sprintf("\n--- END FILE: %s ---\n", item.GetPath()))
		}
	}

	return result.String(), nil
}

func (g *GithubRepositoryGetAllContent) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
	result := struct {
		Repository string `json:"repository"`
		Owner      string `json:"owner"`
		Path       string `json:"path,omitempty"`
	}{}
	err := params.Unmarshal(&result)
	if err != nil {
		return types.ActionResult{}, fmt.Errorf("failed to unmarshal params: %w", err)
	}

	if g.repository != "" && g.owner != "" {
		result.Repository = g.repository
		result.Owner = g.owner
	}

	// Start from root if no path specified
	if result.Path == "" {
		result.Path = "."
	}

	content, err := g.getContentRecursively(ctx, result.Path)
	if err != nil {
		return types.ActionResult{}, err
	}

	return types.ActionResult{Result: content}, nil
}

func (g *GithubRepositoryGetAllContent) Definition() types.ActionDefinition {
	actionName := "get_all_github_repository_content"
	if g.customActionName != "" {
		actionName = g.customActionName
	}
	description := "Get all content of a GitHub repository recursively"
	if g.repository != "" && g.owner != "" {
		return types.ActionDefinition{
			Name:        types.ActionDefinitionName(actionName),
			Description: description,
			Properties: map[string]jsonschema.Definition{
				"path": {
					Type:        jsonschema.String,
					Description: "Optional path to start from (defaults to repository root)",
				},
			},
		}
	}
	return types.ActionDefinition{
		Name:        types.ActionDefinitionName(actionName),
		Description: description,
		Properties: map[string]jsonschema.Definition{
			"path": {
				Type:        jsonschema.String,
				Description: "Optional path to start from (defaults to repository root)",
			},
			"repository": {
				Type:        jsonschema.String,
				Description: "The repository to get content from",
			},
			"owner": {
				Type:        jsonschema.String,
				Description: "The owner of the repository",
			},
		},
		Required: []string{"repository", "owner"},
	}
}

func (a *GithubRepositoryGetAllContent) Plannable() bool {
	return true
}

// GithubRepositoryGetAllContentConfigMeta returns the metadata for GitHub Repository Get All Content action configuration fields
func GithubRepositoryGetAllContentConfigMeta() []config.Field {
	return []config.Field{
		{
			Name:     "token",
			Label:    "GitHub Token",
			Type:     config.FieldTypeText,
			Required: true,
			HelpText: "GitHub API token with repository access",
		},
		{
			Name:     "repository",
			Label:    "Repository",
			Type:     config.FieldTypeText,
			Required: false,
			HelpText: "GitHub repository name",
		},
		{
			Name:     "owner",
			Label:    "Owner",
			Type:     config.FieldTypeText,
			Required: false,
			HelpText: "GitHub repository owner",
		},
		{
			Name:     "customActionName",
			Label:    "Custom Action Name",
			Type:     config.FieldTypeText,
			HelpText: "Custom name for this action",
		},
	}
}
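A minimal usage sketch for this action, based on the constructor and Run signature above and on the calls made in the test file that follows; the repository and owner values are placeholders.

package main

import (
	"context"
	"fmt"
	"os"

	"github.com/mudler/LocalAGI/services/actions"
)

func main() {
	// Placeholder repository/owner; in practice these come from the action's config.
	reader := actions.NewGithubRepositoryGetAllContent(map[string]string{
		"token":      os.Getenv("GITHUB_TOKEN"),
		"repository": "example-repo",
		"owner":      "example-owner",
	})

	// "path" is optional; "." walks the repository from its root.
	result, err := reader.Run(context.Background(), map[string]interface{}{"path": "."})
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	// Each file is wrapped in "--- START FILE: ... ---" / "--- END FILE: ... ---" markers.
	fmt.Println(result.Result)
}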
114 services/actions/githubrepositorygetallcontent_test.go (new file)
@@ -0,0 +1,114 @@
package actions_test

import (
	"context"
	"os"
	"strings"

	"github.com/mudler/LocalAGI/services/actions"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("GithubRepositoryGetAllContent", func() {
	var (
		action *actions.GithubRepositoryGetAllContent
		ctx    context.Context
	)

	BeforeEach(func() {
		ctx = context.Background()

		// Check for required environment variables
		token := os.Getenv("GITHUB_TOKEN")
		repo := os.Getenv("TEST_REPOSITORY")
		owner := os.Getenv("TEST_OWNER")

		// Skip tests if any required environment variable is missing
		if token == "" || repo == "" || owner == "" {
			Skip("Skipping GitHub repository get all content tests: required environment variables not set")
		}

		config := map[string]string{
			"token":            token,
			"repository":       repo,
			"owner":            owner,
			"customActionName": "test_get_all_content",
		}

		action = actions.NewGithubRepositoryGetAllContent(config)
	})

	Describe("Getting repository content", func() {
		It("should successfully get content from root directory with proper file markers", func() {
			params := map[string]interface{}{
				"path": ".",
			}

			result, err := action.Run(ctx, params)
			Expect(err).NotTo(HaveOccurred())
			Expect(result.Result).NotTo(BeEmpty())

			// Verify file markers
			Expect(result.Result).To(ContainSubstring("--- START FILE:"))
			Expect(result.Result).To(ContainSubstring("--- END FILE:"))

			// Verify markers are properly paired
			startCount := strings.Count(result.Result, "--- START FILE:")
			endCount := strings.Count(result.Result, "--- END FILE:")
			Expect(startCount).To(Equal(endCount), "Number of start and end markers should match")
		})

		It("should handle non-existent path", func() {
			params := map[string]interface{}{
				"path": "non-existent-path",
			}

			_, err := action.Run(ctx, params)
			Expect(err).To(HaveOccurred())
		})
	})

	Describe("Action Definition", func() {
		It("should return correct action definition", func() {
			def := action.Definition()
			Expect(def.Name.String()).To(Equal("test_get_all_content"))
			Expect(def.Description).To(ContainSubstring("Get all content of a GitHub repository recursively"))
			Expect(def.Properties).To(HaveKey("path"))
		})

		It("should handle custom action name", func() {
			config := map[string]string{
				"token":            "test-token",
				"customActionName": "custom_action_name",
			}
			action := actions.NewGithubRepositoryGetAllContent(config)
			def := action.Definition()
			Expect(def.Name.String()).To(Equal("custom_action_name"))
		})
	})

	Describe("Configuration", func() {
		It("should handle missing repository and owner in config", func() {
			config := map[string]string{
				"token": "test-token",
			}
			action := actions.NewGithubRepositoryGetAllContent(config)
			def := action.Definition()
			Expect(def.Properties).To(HaveKey("repository"))
			Expect(def.Properties).To(HaveKey("owner"))
		})

		It("should handle provided repository and owner in config", func() {
			config := map[string]string{
				"token":      "test-token",
				"repository": "test-repo",
				"owner":      "test-owner",
			}
			action := actions.NewGithubRepositoryGetAllContent(config)
			def := action.Definition()
			Expect(def.Properties).NotTo(HaveKey("repository"))
			Expect(def.Properties).NotTo(HaveKey("owner"))
		})
	})
})
@@ -1,13 +1,12 @@
 import { useState, useEffect } from 'react';
-import { useParams, Link, useNavigate } from 'react-router-dom';
+import { useParams, Link } from 'react-router-dom';
 
 function AgentStatus() {
   const { name } = useParams();
-  const navigate = useNavigate();
   const [statusData, setStatusData] = useState(null);
   const [loading, setLoading] = useState(true);
   const [error, setError] = useState(null);
-  const [eventSource, setEventSource] = useState(null);
+  const [_eventSource, setEventSource] = useState(null);
   const [liveUpdates, setLiveUpdates] = useState([]);
 
   // Update document title
@@ -49,7 +48,7 @@ function AgentStatus() {
         const data = JSON.parse(event.data);
         setLiveUpdates(prev => [data, ...prev.slice(0, 19)]); // Keep last 20 updates
       } catch (err) {
-        console.error('Error parsing SSE data:', err);
+        setLiveUpdates(prev => [event.data, ...prev.slice(0, 19)]);
       }
     });
 
@@ -129,23 +128,9 @@ function AgentStatus() {
               <h2 className="text-sm font-semibold mb-2">Agent Action:</h2>
               <div className="status-details">
                 <div className="status-row">
-                  <span className="status-label">Result:</span>
-                  <span className="status-value">{formatValue(item.Result)}</span>
+                  <span className="status-label">{index}</span>
+                  <span className="status-value">{formatValue(item)}</span>
                 </div>
-                <div className="status-row">
-                  <span className="status-label">Action:</span>
-                  <span className="status-value">{formatValue(item.Action)}</span>
-                </div>
-                <div className="status-row">
-                  <span className="status-label">Parameters:</span>
-                  <span className="status-value pre-wrap">{formatValue(item.Params)}</span>
-                </div>
-                {item.Reasoning && (
-                  <div className="status-row">
-                    <span className="status-label">Reasoning:</span>
-                    <span className="status-value reasoning">{formatValue(item.Reasoning)}</span>
-                  </div>
-                )}
               </div>
             </div>
           </div>
@@ -30,6 +30,7 @@ export default defineConfig(({ mode }) => {
       '/status': backendUrl,
       '/action': backendUrl,
       '/actions': backendUrl,
+      '/avatars': backendUrl
     }
   }
 }
@@ -4,6 +4,7 @@ import (
 	"crypto/subtle"
 	"embed"
 	"errors"
+	"fmt"
 	"math/rand"
 	"net/http"
 	"path/filepath"
@@ -238,9 +239,20 @@ func (app *App) registerRoutes(pool *state.AgentPool, webapp *fiber.App) {
 			history = &state.Status{ActionResults: []types.ActionState{}}
 		}
+
+		entries := []string{}
+		for _, h := range Reverse(history.Results()) {
+			entries = append(entries, fmt.Sprintf(
+				"Result: %v Action: %v Params: %v Reasoning: %v",
+				h.Result,
+				h.Action.Definition().Name,
+				h.Params,
+				h.Reasoning,
+			))
+		}
+
 		return c.JSON(fiber.Map{
 			"Name":    c.Params("name"),
-			"History": Reverse(history.Results()),
+			"History": entries,
 		})
 	})
 
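After this change the status endpoint returns pre-formatted history strings instead of raw ActionState structs. The sketch below prints an approximation of the new response shape with invented values; the agent name and entry text are placeholders.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Invented example, mirroring the "Result: ... Action: ... Params: ... Reasoning: ..." format above.
	response := map[string]interface{}{
		"Name": "example-agent",
		"History": []string{
			"Result: done Action: search Params: map[query:golang] Reasoning: user asked for references",
		},
	}
	out, _ := json.MarshalIndent(response, "", "  ")
	fmt.Println(string(out))
}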