Compare commits
1 Commits
dependabot
...
mock-llm
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5698d0b832 |
81
.github/workflows/image.yml
vendored
81
.github/workflows/image.yml
vendored
@@ -78,7 +78,8 @@ jobs:
|
||||
VERSION=${{ steps.prep.outputs.binary_version }}
|
||||
context: ./
|
||||
file: ./Dockerfile.webui
|
||||
platforms: linux/amd64,linux/arm64
|
||||
#platforms: linux/amd64,linux/arm64
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
#tags: ${{ steps.prep.outputs.tags }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
@@ -151,81 +152,9 @@ jobs:
|
||||
VERSION=${{ steps.prep.outputs.binary_version }}
|
||||
context: ./
|
||||
file: ./Dockerfile.mcpbox
|
||||
platforms: linux/amd64,linux/arm64
|
||||
#platforms: linux/amd64,linux/arm64
|
||||
platforms: linux/amd64
|
||||
push: true
|
||||
#tags: ${{ steps.prep.outputs.tags }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
sshbox-build:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Prepare
|
||||
id: prep
|
||||
run: |
|
||||
DOCKER_IMAGE=quay.io/mudler/localagi-sshbox
|
||||
# Use branch name as default
|
||||
VERSION=${GITHUB_REF#refs/heads/}
|
||||
BINARY_VERSION=$(git describe --always --tags --dirty)
|
||||
SHORTREF=${GITHUB_SHA::8}
|
||||
# If this is git tag, use the tag name as a docker tag
|
||||
if [[ $GITHUB_REF == refs/tags/* ]]; then
|
||||
VERSION=${GITHUB_REF#refs/tags/}
|
||||
fi
|
||||
TAGS="${DOCKER_IMAGE}:${VERSION},${DOCKER_IMAGE}:${SHORTREF}"
|
||||
# If the VERSION looks like a version number, assume that
|
||||
# this is the most recent version of the image and also
|
||||
# tag it 'latest'.
|
||||
if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
|
||||
TAGS="$TAGS,${DOCKER_IMAGE}:latest"
|
||||
fi
|
||||
# Set output parameters.
|
||||
echo ::set-output name=binary_version::${BINARY_VERSION}
|
||||
echo ::set-output name=tags::${TAGS}
|
||||
echo ::set-output name=docker_image::${DOCKER_IMAGE}
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@master
|
||||
with:
|
||||
platforms: all
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
id: buildx
|
||||
uses: docker/setup-buildx-action@master
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: quay.io
|
||||
username: ${{ secrets.DOCKER_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_PASSWORD }}
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@902fa8ec7d6ecbf8d84d538b9b233a880e428804
|
||||
with:
|
||||
images: quay.io/mudler/localagi-sshbox
|
||||
tags: |
|
||||
type=ref,event=branch,suffix=-{{date 'YYYYMMDDHHmmss'}}
|
||||
type=semver,pattern={{raw}}
|
||||
type=sha,suffix=-{{date 'YYYYMMDDHHmmss'}}
|
||||
type=ref,event=branch
|
||||
flavor: |
|
||||
latest=auto
|
||||
prefix=
|
||||
suffix=
|
||||
|
||||
- name: Build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
builder: ${{ steps.buildx.outputs.name }}
|
||||
build-args: |
|
||||
VERSION=${{ steps.prep.outputs.binary_version }}
|
||||
context: ./
|
||||
file: ./Dockerfile.sshbox
|
||||
platforms: linux/amd64,linux/arm64
|
||||
push: true
|
||||
#tags: ${{ steps.prep.outputs.tags }}
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
6
.github/workflows/tests.yml
vendored
6
.github/workflows/tests.yml
vendored
@@ -48,7 +48,11 @@ jobs:
|
||||
|
||||
- name: Run tests
|
||||
run: |
|
||||
make tests
|
||||
if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then
|
||||
make tests-mock
|
||||
else
|
||||
make tests
|
||||
fi
|
||||
#sudo mv coverage/coverage.txt coverage.txt
|
||||
#sudo chmod 777 coverage.txt
|
||||
|
||||
|
||||
49
.github/workflows/tests_fragile.yml
vendored
Normal file
49
.github/workflows/tests_fragile.yml
vendored
Normal file
@@ -0,0 +1,49 @@
|
||||
name: Run Fragile Go Tests
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- '**'
|
||||
|
||||
concurrency:
|
||||
group: ci-non-blocking-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
llm-tests:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
- run: |
|
||||
# Add Docker's official GPG key:
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y ca-certificates curl
|
||||
sudo install -m 0755 -d /etc/apt/keyrings
|
||||
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
|
||||
sudo chmod a+r /etc/apt/keyrings/docker.asc
|
||||
|
||||
# Add the repository to Apt sources:
|
||||
echo \
|
||||
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
|
||||
$(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
|
||||
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin make
|
||||
docker version
|
||||
|
||||
docker run --rm hello-world
|
||||
- uses: actions/setup-go@v5
|
||||
with:
|
||||
go-version: '>=1.17.0'
|
||||
- name: Free up disk space
|
||||
run: |
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /opt/ghc
|
||||
sudo apt-get clean
|
||||
docker system prune -af || true
|
||||
df -h
|
||||
- name: Run tests
|
||||
run: |
|
||||
make tests
|
||||
@@ -1,46 +0,0 @@
|
||||
# Final stage
|
||||
FROM ubuntu:24.04
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install runtime dependencies
|
||||
RUN apt-get update && apt-get install -y \
|
||||
ca-certificates \
|
||||
tzdata \
|
||||
docker.io \
|
||||
bash \
|
||||
wget \
|
||||
curl \
|
||||
openssh-server \
|
||||
sudo
|
||||
|
||||
# Configure SSH
|
||||
RUN mkdir /var/run/sshd
|
||||
RUN echo 'PasswordAuthentication yes' >> /etc/ssh/sshd_config
|
||||
|
||||
# Create startup script
|
||||
RUN echo '#!/bin/bash\n\
|
||||
if [ -n "$SSH_USER" ]; then\n\
|
||||
if [ "$SSH_USER" = "root" ]; then\n\
|
||||
echo "PermitRootLogin yes" >> /etc/ssh/sshd_config\n\
|
||||
if [ -n "$SSH_PASSWORD" ]; then\n\
|
||||
echo "root:$SSH_PASSWORD" | chpasswd\n\
|
||||
fi\n\
|
||||
else\n\
|
||||
echo "PermitRootLogin no" >> /etc/ssh/sshd_config\n\
|
||||
useradd -m -s /bin/bash $SSH_USER\n\
|
||||
if [ -n "$SSH_PASSWORD" ]; then\n\
|
||||
echo "$SSH_USER:$SSH_PASSWORD" | chpasswd\n\
|
||||
fi\n\
|
||||
if [ -n "$SUDO_ACCESS" ] && [ "$SUDO_ACCESS" = "true" ]; then\n\
|
||||
echo "$SSH_USER ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/$SSH_USER\n\
|
||||
fi\n\
|
||||
fi\n\
|
||||
fi\n\
|
||||
/usr/sbin/sshd -D' > /start.sh
|
||||
|
||||
RUN chmod +x /start.sh
|
||||
|
||||
EXPOSE 22
|
||||
|
||||
CMD ["/start.sh"]
|
||||
9
Makefile
9
Makefile
@@ -3,6 +3,8 @@ IMAGE_NAME?=webui
|
||||
MCPBOX_IMAGE_NAME?=mcpbox
|
||||
ROOT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
|
||||
|
||||
.PHONY: tests tests-mock cleanup-tests
|
||||
|
||||
prepare-tests: build-mcpbox
|
||||
docker compose up -d --build
|
||||
docker run -d -v /var/run/docker.sock:/var/run/docker.sock --privileged -p 9090:8080 --rm -ti $(MCPBOX_IMAGE_NAME)
|
||||
@@ -11,7 +13,10 @@ cleanup-tests:
|
||||
docker compose down
|
||||
|
||||
tests: prepare-tests
|
||||
LOCALAGI_MCPBOX_URL="http://localhost:9090" LOCALAGI_MODEL="gemma-3-4b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
|
||||
LOCALAGI_MCPBOX_URL="http://localhost:9090" LOCALAGI_MODEL="gemma-3-12b-it-qat" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
|
||||
|
||||
tests-mock: prepare-tests
|
||||
LOCALAGI_MCPBOX_URL="http://localhost:9090" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...
|
||||
|
||||
run-nokb:
|
||||
$(MAKE) run KBDISABLEINDEX=true
|
||||
@@ -37,4 +42,4 @@ build-mcpbox:
|
||||
docker build -t $(MCPBOX_IMAGE_NAME) -f Dockerfile.mcpbox .
|
||||
|
||||
run-mcpbox:
|
||||
docker run -v /var/run/docker.sock:/var/run/docker.sock --privileged -p 9090:8080 -ti mcpbox
|
||||
docker run -v /var/run/docker.sock:/var/run/docker.sock --privileged -p 9090:8080 -ti mcpbox
|
||||
|
||||
59
README.md
59
README.md
@@ -63,7 +63,7 @@ MODEL_NAME=gemma-3-12b-it docker compose up
|
||||
|
||||
# NVIDIA GPU setup with custom multimodal and image models
|
||||
MODEL_NAME=gemma-3-12b-it \
|
||||
MULTIMODAL_MODEL=moondream2-20250414 \
|
||||
MULTIMODAL_MODEL=minicpm-v-2_6 \
|
||||
IMAGE_MODEL=flux.1-dev-ggml \
|
||||
docker compose -f docker-compose.nvidia.yaml up
|
||||
```
|
||||
@@ -76,8 +76,7 @@ Still having issues? see this Youtube video: https://youtu.be/HtVwIxW3ePg
|
||||
|
||||
[](https://youtu.be/HtVwIxW3ePg)
|
||||
[](https://youtu.be/v82rswGJt_M)
|
||||
[](https://youtu.be/d_we-AYksSw)
|
||||
[](https://youtu.be/2Xvx78i5oBs)
|
||||
[](https://youtu.be/d_we-AYksSw)
|
||||
|
||||
|
||||
## 📚🆕 Local Stack Family
|
||||
@@ -126,8 +125,8 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
|
||||
- Supports text, multimodal, and image generation models
|
||||
- Run with: `docker compose -f docker-compose.nvidia.yaml up`
|
||||
- Default models:
|
||||
- Text: `gemma-3-4b-it-qat`
|
||||
- Multimodal: `moondream2-20250414`
|
||||
- Text: `gemma-3-12b-it-qat`
|
||||
- Multimodal: `minicpm-v-2_6`
|
||||
- Image: `sd-1.5-ggml`
|
||||
- Environment variables:
|
||||
- `MODEL_NAME`: Text model to use
|
||||
@@ -142,8 +141,8 @@ LocalAGI supports multiple hardware configurations through Docker Compose profil
|
||||
- Supports text, multimodal, and image generation models
|
||||
- Run with: `docker compose -f docker-compose.intel.yaml up`
|
||||
- Default models:
|
||||
- Text: `gemma-3-4b-it-qat`
|
||||
- Multimodal: `moondream2-20250414`
|
||||
- Text: `gemma-3-12b-it-qat`
|
||||
- Multimodal: `minicpm-v-2_6`
|
||||
- Image: `sd-1.5-ggml`
|
||||
- Environment variables:
|
||||
- `MODEL_NAME`: Text model to use
|
||||
@@ -161,20 +160,20 @@ MODEL_NAME=gemma-3-12b-it docker compose up
|
||||
|
||||
# NVIDIA GPU with custom models
|
||||
MODEL_NAME=gemma-3-12b-it \
|
||||
MULTIMODAL_MODEL=moondream2-20250414 \
|
||||
MULTIMODAL_MODEL=minicpm-v-2_6 \
|
||||
IMAGE_MODEL=flux.1-dev-ggml \
|
||||
docker compose -f docker-compose.nvidia.yaml up
|
||||
|
||||
# Intel GPU with custom models
|
||||
MODEL_NAME=gemma-3-12b-it \
|
||||
MULTIMODAL_MODEL=moondream2-20250414 \
|
||||
MULTIMODAL_MODEL=minicpm-v-2_6 \
|
||||
IMAGE_MODEL=sd-1.5-ggml \
|
||||
docker compose -f docker-compose.intel.yaml up
|
||||
```
|
||||
|
||||
If no models are specified, it will use the defaults:
|
||||
- Text model: `gemma-3-4b-it-qat`
|
||||
- Multimodal model: `moondream2-20250414`
|
||||
- Text model: `gemma-3-12b-it-qat`
|
||||
- Multimodal model: `minicpm-v-2_6`
|
||||
- Image model: `sd-1.5-ggml`
|
||||
|
||||
Good (relatively small) models that have been tested are:
|
||||
@@ -491,26 +490,9 @@ Get a token from @botfather, then:
|
||||
|
||||
```json
|
||||
{
|
||||
"token": "your-bot-father-token",
|
||||
"group_mode": "true",
|
||||
"mention_only": "true",
|
||||
"admins": "username1,username2"
|
||||
"token": "your-bot-father-token"
|
||||
}
|
||||
```
|
||||
|
||||
Configuration options:
|
||||
- `token`: Your bot token from BotFather
|
||||
- `group_mode`: Enable/disable group chat functionality
|
||||
- `mention_only`: When enabled, bot only responds when mentioned in groups
|
||||
- `admins`: Comma-separated list of Telegram usernames allowed to use the bot in private chats
|
||||
- `channel_id`: Optional channel ID for the bot to send messages to
|
||||
|
||||
> **Important**: For group functionality to work properly:
|
||||
> 1. Go to @BotFather
|
||||
> 2. Select your bot
|
||||
> 3. Go to "Bot Settings" > "Group Privacy"
|
||||
> 4. Select "Turn off" to allow the bot to read all messages in groups
|
||||
> 5. Restart your bot after changing this setting
|
||||
</details>
|
||||
|
||||
<details>
|
||||
@@ -529,23 +511,6 @@ Connect to IRC networks:
|
||||
```
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary><strong>Email</strong></summary>
|
||||
|
||||
```json
|
||||
{
|
||||
"smtpServer": "smtp.gmail.com:587",
|
||||
"imapServer": "imap.gmail.com:993",
|
||||
"smtpInsecure": "false",
|
||||
"imapInsecure": "false",
|
||||
"username": "user@gmail.com",
|
||||
"email": "user@gmail.com",
|
||||
"password": "correct-horse-battery-staple",
|
||||
"name": "LogalAGI Agent"
|
||||
}
|
||||
```
|
||||
</details>
|
||||
|
||||
## REST API
|
||||
|
||||
<details>
|
||||
@@ -729,8 +694,6 @@ LocalAGI supports environment configurations. Note that these environment variab
|
||||
| `LOCALAGI_TIMEOUT` | Request timeout settings |
|
||||
| `LOCALAGI_STATE_DIR` | Where state gets stored |
|
||||
| `LOCALAGI_LOCALRAG_URL` | LocalRecall connection |
|
||||
| `LOCALAGI_SSHBOX_URL` | LocalAGI SSHBox URL, e.g. user:pass@ip:port |
|
||||
| `LOCALAGI_MCPBOX_URL` | LocalAGI MCPBox URL, e.g. http://mcpbox:8080 |
|
||||
| `LOCALAGI_ENABLE_CONVERSATIONS_LOGGING` | Toggle conversation logs |
|
||||
| `LOCALAGI_API_KEYS` | A comma separated list of api keys used for authentication |
|
||||
</details>
|
||||
|
||||
@@ -1,193 +0,0 @@
|
||||
package action
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/sashabaranov/go-openai/jsonschema"
|
||||
)
|
||||
|
||||
const (
|
||||
ReminderActionName = "set_reminder"
|
||||
ListRemindersName = "list_reminders"
|
||||
RemoveReminderName = "remove_reminder"
|
||||
)
|
||||
|
||||
func NewReminder() *ReminderAction {
|
||||
return &ReminderAction{}
|
||||
}
|
||||
|
||||
func NewListReminders() *ListRemindersAction {
|
||||
return &ListRemindersAction{}
|
||||
}
|
||||
|
||||
func NewRemoveReminder() *RemoveReminderAction {
|
||||
return &RemoveReminderAction{}
|
||||
}
|
||||
|
||||
type ReminderAction struct{}
|
||||
type ListRemindersAction struct{}
|
||||
type RemoveReminderAction struct{}
|
||||
|
||||
type RemoveReminderParams struct {
|
||||
Index int `json:"index"`
|
||||
}
|
||||
|
||||
func (a *ReminderAction) Run(ctx context.Context, sharedState *types.AgentSharedState, params types.ActionParams) (types.ActionResult, error) {
|
||||
result := types.ReminderActionResponse{}
|
||||
err := params.Unmarshal(&result)
|
||||
if err != nil {
|
||||
return types.ActionResult{}, err
|
||||
}
|
||||
|
||||
// Validate the cron expression
|
||||
parser := cron.NewParser(cron.Second | cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
|
||||
_, err = parser.Parse(result.CronExpr)
|
||||
if err != nil {
|
||||
return types.ActionResult{}, err
|
||||
}
|
||||
|
||||
// Calculate next run time
|
||||
now := time.Now()
|
||||
schedule, _ := parser.Parse(result.CronExpr) // We can ignore the error since we validated above
|
||||
nextRun := schedule.Next(now)
|
||||
|
||||
// Set the reminder details
|
||||
result.LastRun = now
|
||||
result.NextRun = nextRun
|
||||
// IsRecurring is set by the user through the action parameters
|
||||
|
||||
// Store the reminder in the shared state
|
||||
if sharedState.Reminders == nil {
|
||||
sharedState.Reminders = make([]types.ReminderActionResponse, 0)
|
||||
}
|
||||
sharedState.Reminders = append(sharedState.Reminders, result)
|
||||
|
||||
return types.ActionResult{
|
||||
Result: "Reminder set successfully",
|
||||
Metadata: map[string]interface{}{
|
||||
"reminder": result,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *ListRemindersAction) Run(ctx context.Context, sharedState *types.AgentSharedState, params types.ActionParams) (types.ActionResult, error) {
|
||||
if sharedState.Reminders == nil || len(sharedState.Reminders) == 0 {
|
||||
return types.ActionResult{
|
||||
Result: "No reminders set",
|
||||
}, nil
|
||||
}
|
||||
|
||||
var result strings.Builder
|
||||
result.WriteString("Current reminders:\n")
|
||||
for i, reminder := range sharedState.Reminders {
|
||||
status := "one-time"
|
||||
if reminder.IsRecurring {
|
||||
status = "recurring"
|
||||
}
|
||||
result.WriteString(fmt.Sprintf("%d. %s (Next run: %s, Status: %s)\n",
|
||||
i+1,
|
||||
reminder.Message,
|
||||
reminder.NextRun.Format(time.RFC3339),
|
||||
status))
|
||||
}
|
||||
|
||||
return types.ActionResult{
|
||||
Result: result.String(),
|
||||
Metadata: map[string]interface{}{
|
||||
"reminders": sharedState.Reminders,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *RemoveReminderAction) Run(ctx context.Context, sharedState *types.AgentSharedState, params types.ActionParams) (types.ActionResult, error) {
|
||||
var removeParams RemoveReminderParams
|
||||
err := params.Unmarshal(&removeParams)
|
||||
if err != nil {
|
||||
return types.ActionResult{}, err
|
||||
}
|
||||
|
||||
if sharedState.Reminders == nil || len(sharedState.Reminders) == 0 {
|
||||
return types.ActionResult{
|
||||
Result: "No reminders to remove",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// Convert from 1-based index to 0-based
|
||||
index := removeParams.Index - 1
|
||||
if index < 0 || index >= len(sharedState.Reminders) {
|
||||
return types.ActionResult{}, fmt.Errorf("invalid reminder index: %d", removeParams.Index)
|
||||
}
|
||||
|
||||
// Remove the reminder
|
||||
removed := sharedState.Reminders[index]
|
||||
sharedState.Reminders = append(sharedState.Reminders[:index], sharedState.Reminders[index+1:]...)
|
||||
|
||||
return types.ActionResult{
|
||||
Result: fmt.Sprintf("Removed reminder: %s", removed.Message),
|
||||
Metadata: map[string]interface{}{
|
||||
"removed_reminder": removed,
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (a *ReminderAction) Plannable() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (a *ListRemindersAction) Plannable() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (a *RemoveReminderAction) Plannable() bool {
|
||||
return true
|
||||
}
|
||||
|
||||
func (a *ReminderAction) Definition() types.ActionDefinition {
|
||||
return types.ActionDefinition{
|
||||
Name: ReminderActionName,
|
||||
Description: "Set a reminder for the agent to wake up and perform a task based on a cron schedule. Examples: '0 0 * * *' (daily at midnight), '0 */2 * * *' (every 2 hours), '0 0 * * 1' (every Monday at midnight)",
|
||||
Properties: map[string]jsonschema.Definition{
|
||||
"message": {
|
||||
Type: jsonschema.String,
|
||||
Description: "The message or task to be reminded about",
|
||||
},
|
||||
"cron_expr": {
|
||||
Type: jsonschema.String,
|
||||
Description: "Cron expression for scheduling (e.g. '0 0 * * *' for daily at midnight). Format: 'second minute hour day month weekday'",
|
||||
},
|
||||
"is_recurring": {
|
||||
Type: jsonschema.Boolean,
|
||||
Description: "Whether this reminder should repeat according to the cron schedule (true) or trigger only once (false)",
|
||||
},
|
||||
},
|
||||
Required: []string{"message", "cron_expr", "is_recurring"},
|
||||
}
|
||||
}
|
||||
|
||||
func (a *ListRemindersAction) Definition() types.ActionDefinition {
|
||||
return types.ActionDefinition{
|
||||
Name: ListRemindersName,
|
||||
Description: "List all currently set reminders with their next scheduled run times",
|
||||
Properties: map[string]jsonschema.Definition{},
|
||||
Required: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
func (a *RemoveReminderAction) Definition() types.ActionDefinition {
|
||||
return types.ActionDefinition{
|
||||
Name: RemoveReminderName,
|
||||
Description: "Remove a reminder by its index number (use list_reminders to see the index)",
|
||||
Properties: map[string]jsonschema.Definition{
|
||||
"index": {
|
||||
Type: jsonschema.Integer,
|
||||
Description: "The index number of the reminder to remove (1-based)",
|
||||
},
|
||||
},
|
||||
Required: []string{"index"},
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/mudler/LocalAGI/core/action"
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
@@ -13,24 +12,12 @@ import (
|
||||
"github.com/mudler/LocalAGI/pkg/xlog"
|
||||
|
||||
"github.com/sashabaranov/go-openai"
|
||||
"github.com/sashabaranov/go-openai/jsonschema"
|
||||
)
|
||||
|
||||
const parameterReasoningPrompt = `You are tasked with generating the optimal parameters for the action "%s". The action requires the following parameters:
|
||||
%s
|
||||
|
||||
Your task is to:
|
||||
1. Generate the best possible values for each required parameter
|
||||
2. If the parameter requires code, provide complete, working code
|
||||
3. If the parameter requires text or documentation, provide comprehensive, well-structured content
|
||||
4. Ensure all parameters are complete and ready to be used
|
||||
|
||||
Focus on quality and completeness. Do not explain your reasoning or analyze the action's purpose - just provide the best possible parameter values.`
|
||||
|
||||
type decisionResult struct {
|
||||
actionParams types.ActionParams
|
||||
message string
|
||||
actionName string
|
||||
actioName string
|
||||
}
|
||||
|
||||
// decision forces the agent to take one of the available actions
|
||||
@@ -144,7 +131,7 @@ func (a *Agent) decision(
|
||||
a.observer.Update(*obs)
|
||||
}
|
||||
|
||||
return &decisionResult{actionParams: params, actionName: msg.ToolCalls[0].Function.Name, message: msg.Content}, nil
|
||||
return &decisionResult{actionParams: params, actioName: msg.ToolCalls[0].Function.Name, message: msg.Content}, nil
|
||||
}
|
||||
|
||||
return nil, fmt.Errorf("failed to make a decision after %d attempts: %w", maxRetries, lastErr)
|
||||
@@ -236,14 +223,6 @@ func (m Messages) IsLastMessageFromRole(role string) bool {
|
||||
}
|
||||
|
||||
func (a *Agent) generateParameters(job *types.Job, pickTemplate string, act types.Action, c []openai.ChatCompletionMessage, reasoning string, maxAttempts int) (*decisionResult, error) {
|
||||
|
||||
if len(act.Definition().Properties) > 0 {
|
||||
xlog.Debug("Action has properties", "action", act.Definition().Name, "properties", act.Definition().Properties)
|
||||
} else {
|
||||
xlog.Debug("Action has no properties", "action", act.Definition().Name)
|
||||
return &decisionResult{actionParams: types.ActionParams{}}, nil
|
||||
}
|
||||
|
||||
stateHUD, err := renderTemplate(pickTemplate, a.prepareHUD(), a.availableActions(), reasoning)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -261,32 +240,9 @@ func (a *Agent) generateParameters(job *types.Job, pickTemplate string, act type
|
||||
|
||||
cc := conversation
|
||||
if a.options.forceReasoning {
|
||||
// First, get the LLM to reason about optimal parameter usage
|
||||
parameterReasoningPrompt := fmt.Sprintf(parameterReasoningPrompt,
|
||||
act.Definition().Name,
|
||||
formatProperties(act.Definition().Properties))
|
||||
|
||||
// Get initial reasoning about parameters using askLLM
|
||||
paramReasoningMsg, err := a.askLLM(job.GetContext(),
|
||||
append(conversation, openai.ChatCompletionMessage{
|
||||
Role: "system",
|
||||
Content: parameterReasoningPrompt,
|
||||
}),
|
||||
maxAttempts,
|
||||
)
|
||||
if err != nil {
|
||||
xlog.Warn("Failed to get parameter reasoning", "error", err)
|
||||
}
|
||||
|
||||
// Combine original reasoning with parameter-specific reasoning
|
||||
enhancedReasoning := reasoning
|
||||
if paramReasoningMsg.Content != "" {
|
||||
enhancedReasoning = fmt.Sprintf("%s\n\nParameter Analysis:\n%s", reasoning, paramReasoningMsg.Content)
|
||||
}
|
||||
|
||||
cc = append(conversation, openai.ChatCompletionMessage{
|
||||
Role: "system",
|
||||
Content: fmt.Sprintf("The agent decided to use the tool %s with the following reasoning: %s", act.Definition().Name, enhancedReasoning),
|
||||
Content: fmt.Sprintf("The agent decided to use the tool %s with the following reasoning: %s", act.Definition().Name, reasoning),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -309,15 +265,6 @@ func (a *Agent) generateParameters(job *types.Job, pickTemplate string, act type
|
||||
return nil, fmt.Errorf("failed to generate parameters after %d attempts: %w", maxAttempts, attemptErr)
|
||||
}
|
||||
|
||||
// Helper function to format properties for the prompt
|
||||
func formatProperties(props map[string]jsonschema.Definition) string {
|
||||
var result strings.Builder
|
||||
for name, prop := range props {
|
||||
result.WriteString(fmt.Sprintf("- %s: %s\n", name, prop.Description))
|
||||
}
|
||||
return result.String()
|
||||
}
|
||||
|
||||
func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction types.Action, actionParams types.ActionParams, reasoning string, pickTemplate string, conv Messages) (Messages, error) {
|
||||
// Planning: run all the actions in sequence
|
||||
if !chosenAction.Definition().Name.Is(action.PlanActionName) {
|
||||
@@ -500,12 +447,12 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
|
||||
return nil, nil, "", err
|
||||
}
|
||||
|
||||
xlog.Debug("thought action Name", "actionName", thought.actionName)
|
||||
xlog.Debug("thought message", "message", thought.message)
|
||||
xlog.Debug(fmt.Sprintf("thought action Name: %v", thought.actioName))
|
||||
xlog.Debug(fmt.Sprintf("thought message: %v", thought.message))
|
||||
|
||||
// Find the action
|
||||
chosenAction := a.availableActions().Find(thought.actionName)
|
||||
if chosenAction == nil || thought.actionName == "" {
|
||||
chosenAction := a.availableActions().Find(thought.actioName)
|
||||
if chosenAction == nil || thought.actioName == "" {
|
||||
xlog.Debug("no answer")
|
||||
|
||||
// LLM replied with an answer?
|
||||
@@ -516,7 +463,6 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
|
||||
return chosenAction, thought.actionParams, thought.message, nil
|
||||
}
|
||||
|
||||
// Force the LLM to think and we extract a "reasoning" to pick a specific action and with which parameters
|
||||
xlog.Debug("[pickAction] forcing reasoning")
|
||||
|
||||
prompt, err := renderTemplate(templ, a.prepareHUD(), a.availableActions(), "")
|
||||
@@ -534,35 +480,33 @@ func (a *Agent) pickAction(job *types.Job, templ string, messages []openai.ChatC
|
||||
}, c...)
|
||||
}
|
||||
|
||||
// Create a detailed prompt for reasoning that includes available actions and their properties
|
||||
reasoningPrompt := "Analyze the current situation and determine the best course of action. Consider the following:\n\n"
|
||||
reasoningPrompt += "Available Actions:\n"
|
||||
for _, act := range a.availableActions() {
|
||||
reasoningPrompt += fmt.Sprintf("- %s: %s\n", act.Definition().Name, act.Definition().Description)
|
||||
if len(act.Definition().Properties) > 0 {
|
||||
reasoningPrompt += " Properties:\n"
|
||||
for name, prop := range act.Definition().Properties {
|
||||
reasoningPrompt += fmt.Sprintf(" - %s: %s\n", name, prop.Description)
|
||||
}
|
||||
}
|
||||
reasoningPrompt += "\n"
|
||||
}
|
||||
reasoningPrompt += "\nProvide a detailed reasoning about what action would be most appropriate in this situation and why. You can also just reply with a simple message by choosing the 'reply' or 'answer' action."
|
||||
|
||||
// Get reasoning using askLLM
|
||||
reasoningMsg, err := a.askLLM(job.GetContext(),
|
||||
append(c, openai.ChatCompletionMessage{
|
||||
Role: "system",
|
||||
Content: reasoningPrompt,
|
||||
}),
|
||||
maxRetries)
|
||||
reasoningAction := action.NewReasoning()
|
||||
thought, err := a.decision(job,
|
||||
c,
|
||||
types.Actions{reasoningAction}.ToTools(),
|
||||
reasoningAction.Definition().Name.String(), maxRetries)
|
||||
if err != nil {
|
||||
return nil, nil, "", fmt.Errorf("failed to get reasoning: %w", err)
|
||||
return nil, nil, "", err
|
||||
}
|
||||
if thought.actioName != "" && thought.actioName != reasoningAction.Definition().Name.String() {
|
||||
return nil, nil, "", fmt.Errorf("Expected reasoning action not: %s", thought.actioName)
|
||||
}
|
||||
|
||||
originalReasoning := reasoningMsg.Content
|
||||
originalReasoning := ""
|
||||
response := &action.ReasoningResponse{}
|
||||
if thought.actionParams != nil {
|
||||
if err := thought.actionParams.Unmarshal(response); err != nil {
|
||||
return nil, nil, "", err
|
||||
}
|
||||
originalReasoning = response.Reasoning
|
||||
}
|
||||
if thought.message != "" {
|
||||
originalReasoning = thought.message
|
||||
}
|
||||
|
||||
xlog.Debug("[pickAction] picking action", "messages", c)
|
||||
// thought, err := a.askLLM(ctx,
|
||||
// c,
|
||||
|
||||
actionsID := []string{"reply"}
|
||||
for _, m := range a.availableActions() {
|
||||
|
||||
@@ -15,7 +15,6 @@ import (
|
||||
"github.com/mudler/LocalAGI/core/action"
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
"github.com/mudler/LocalAGI/pkg/llm"
|
||||
"github.com/robfig/cron/v3"
|
||||
"github.com/sashabaranov/go-openai"
|
||||
)
|
||||
|
||||
@@ -30,7 +29,7 @@ type Agent struct {
|
||||
sync.Mutex
|
||||
options *options
|
||||
Character Character
|
||||
client *openai.Client
|
||||
client llm.LLMClient
|
||||
jobQueue chan *types.Job
|
||||
context *types.ActionContext
|
||||
|
||||
@@ -64,7 +63,12 @@ func New(opts ...Option) (*Agent, error) {
|
||||
return nil, fmt.Errorf("failed to set options: %v", err)
|
||||
}
|
||||
|
||||
client := llm.NewClient(options.LLMAPI.APIKey, options.LLMAPI.APIURL, options.timeout)
|
||||
var client llm.LLMClient
|
||||
if options.llmClient != nil {
|
||||
client = options.llmClient
|
||||
} else {
|
||||
client = llm.NewClient(options.LLMAPI.APIKey, options.LLMAPI.APIURL, options.timeout)
|
||||
}
|
||||
|
||||
c := context.Background()
|
||||
if options.context != nil {
|
||||
@@ -126,6 +130,11 @@ func (a *Agent) SharedState() *types.AgentSharedState {
|
||||
return a.sharedState
|
||||
}
|
||||
|
||||
// LLMClient returns the agent's LLM client (for testing)
|
||||
func (a *Agent) LLMClient() llm.LLMClient {
|
||||
return a.client
|
||||
}
|
||||
|
||||
func (a *Agent) startNewConversationsConsumer() {
|
||||
go func() {
|
||||
for {
|
||||
@@ -618,7 +627,7 @@ func (a *Agent) consumeJob(job *types.Job, role string, retries int) {
|
||||
conv = a.processUserInputs(job, role, conv)
|
||||
|
||||
// RAG
|
||||
conv = a.knowledgeBaseLookup(job, conv)
|
||||
a.knowledgeBaseLookup(conv)
|
||||
|
||||
var pickTemplate string
|
||||
var reEvaluationTemplate string
|
||||
@@ -1027,83 +1036,25 @@ func (a *Agent) periodicallyRun(timer *time.Timer) {
|
||||
|
||||
xlog.Debug("Agent is running periodically", "agent", a.Character.Name)
|
||||
|
||||
// Check for reminders that need to be triggered
|
||||
now := time.Now()
|
||||
var triggeredReminders []types.ReminderActionResponse
|
||||
var remainingReminders []types.ReminderActionResponse
|
||||
// TODO: Would be nice if we have a special action to
|
||||
// contact the user. This would actually make sure that
|
||||
// if the agent wants to initiate a conversation, it can do so.
|
||||
// This would be a special action that would be picked up by the agent
|
||||
// and would be used to contact the user.
|
||||
|
||||
for _, reminder := range a.sharedState.Reminders {
|
||||
xlog.Debug("Checking reminder", "reminder", reminder)
|
||||
if now.After(reminder.NextRun) {
|
||||
triggeredReminders = append(triggeredReminders, reminder)
|
||||
xlog.Debug("Reminder triggered", "reminder", reminder)
|
||||
// Calculate next run time for recurring reminders
|
||||
if reminder.IsRecurring {
|
||||
xlog.Debug("Reminder is recurring", "reminder", reminder)
|
||||
parser := cron.NewParser(cron.Second | cron.Minute | cron.Hour | cron.Dom | cron.Month | cron.Dow)
|
||||
schedule, err := parser.Parse(reminder.CronExpr)
|
||||
if err == nil {
|
||||
nextRun := schedule.Next(now)
|
||||
xlog.Debug("Next run time", "reminder", reminder, "nextRun", nextRun)
|
||||
reminder.LastRun = now
|
||||
reminder.NextRun = nextRun
|
||||
remainingReminders = append(remainingReminders, reminder)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
xlog.Debug("Reminder not triggered", "reminder", reminder)
|
||||
remainingReminders = append(remainingReminders, reminder)
|
||||
}
|
||||
}
|
||||
// if len(conv()) != 0 {
|
||||
// // Here the LLM could decide to store some part of the conversation too in the memory
|
||||
// evaluateMemory := NewJob(
|
||||
// WithText(
|
||||
// `Evaluate the current conversation and decide if we need to store some relevant informations from it`,
|
||||
// ),
|
||||
// WithReasoningCallback(a.options.reasoningCallback),
|
||||
// WithResultCallback(a.options.resultCallback),
|
||||
// )
|
||||
// a.consumeJob(evaluateMemory, SystemRole)
|
||||
|
||||
// Update the reminders list
|
||||
a.sharedState.Reminders = remainingReminders
|
||||
|
||||
// Handle triggered reminders
|
||||
for _, reminder := range triggeredReminders {
|
||||
xlog.Info("Processing triggered reminder", "agent", a.Character.Name, "message", reminder.Message)
|
||||
|
||||
// Create a more natural conversation flow for the reminder
|
||||
reminderJob := types.NewJob(
|
||||
types.WithText(fmt.Sprintf("I have a reminder for you: %s", reminder.Message)),
|
||||
types.WithReasoningCallback(a.options.reasoningCallback),
|
||||
types.WithResultCallback(a.options.resultCallback),
|
||||
)
|
||||
|
||||
// Add the reminder message to the job's metadata
|
||||
reminderJob.Metadata = map[string]interface{}{
|
||||
"message": reminder.Message,
|
||||
"is_reminder": true,
|
||||
}
|
||||
|
||||
// Process the reminder as a normal conversation
|
||||
a.consumeJob(reminderJob, UserRole, a.options.loopDetectionSteps)
|
||||
|
||||
// After the reminder job is complete, ensure the user is notified
|
||||
if reminderJob.Result != nil && reminderJob.Result.Conversation != nil {
|
||||
// Get the last assistant message from the conversation
|
||||
var lastAssistantMsg *openai.ChatCompletionMessage
|
||||
for i := len(reminderJob.Result.Conversation) - 1; i >= 0; i-- {
|
||||
if reminderJob.Result.Conversation[i].Role == AssistantRole {
|
||||
lastAssistantMsg = &reminderJob.Result.Conversation[i]
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if lastAssistantMsg != nil && lastAssistantMsg.Content != "" {
|
||||
// Send the reminder response to the user
|
||||
msg := openai.ChatCompletionMessage{
|
||||
Role: "assistant",
|
||||
Content: fmt.Sprintf("Reminder Update: %s\n\n%s", reminder.Message, lastAssistantMsg.Content),
|
||||
}
|
||||
|
||||
go func(agent *Agent) {
|
||||
xlog.Info("Sending reminder response to user", "agent", agent.Character.Name, "message", msg.Content)
|
||||
agent.newConversations <- msg
|
||||
}(a)
|
||||
}
|
||||
}
|
||||
}
|
||||
// a.ResetConversation()
|
||||
// }
|
||||
|
||||
if !a.options.standaloneJob {
|
||||
return
|
||||
@@ -1115,6 +1066,7 @@ func (a *Agent) periodicallyRun(timer *time.Timer) {
|
||||
// - evaluating the result
|
||||
// - asking the agent to do something else based on the result
|
||||
|
||||
// whatNext := NewJob(WithText("Decide what to do based on the state"))
|
||||
whatNext := types.NewJob(
|
||||
types.WithText(innerMonologueTemplate),
|
||||
types.WithReasoningCallback(a.options.reasoningCallback),
|
||||
@@ -1123,6 +1075,31 @@ func (a *Agent) periodicallyRun(timer *time.Timer) {
|
||||
a.consumeJob(whatNext, SystemRole, a.options.loopDetectionSteps)
|
||||
|
||||
xlog.Info("STOP -- Periodically run is done", "agent", a.Character.Name)
|
||||
|
||||
// Save results from state
|
||||
|
||||
// a.ResetConversation()
|
||||
|
||||
// doWork := NewJob(WithText("Select the tool to use based on your goal and the current state."))
|
||||
// a.consumeJob(doWork, SystemRole)
|
||||
|
||||
// results := []string{}
|
||||
// for _, v := range doWork.Result.State {
|
||||
// results = append(results, v.Result)
|
||||
// }
|
||||
|
||||
// a.ResetConversation()
|
||||
|
||||
// // Here the LLM could decide to do something based on the result of our automatic action
|
||||
// evaluateAction := NewJob(
|
||||
// WithText(
|
||||
// `Evaluate the current situation and decide if we need to execute other tools (for instance to store results into permanent, or short memory).
|
||||
// We have done the following actions:
|
||||
// ` + strings.Join(results, "\n"),
|
||||
// ))
|
||||
// a.consumeJob(evaluateAction, SystemRole)
|
||||
|
||||
// a.ResetConversation()
|
||||
}
|
||||
|
||||
func (a *Agent) Run() error {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package agent_test
|
||||
|
||||
import (
|
||||
"net/url"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
@@ -13,15 +14,19 @@ func TestAgent(t *testing.T) {
|
||||
RunSpecs(t, "Agent test suite")
|
||||
}
|
||||
|
||||
var testModel = os.Getenv("LOCALAGI_MODEL")
|
||||
var apiURL = os.Getenv("LOCALAI_API_URL")
|
||||
var apiKeyURL = os.Getenv("LOCALAI_API_KEY")
|
||||
var (
|
||||
testModel = os.Getenv("LOCALAGI_MODEL")
|
||||
apiURL = os.Getenv("LOCALAI_API_URL")
|
||||
apiKey = os.Getenv("LOCALAI_API_KEY")
|
||||
useRealLocalAI bool
|
||||
clientTimeout = "10m"
|
||||
)
|
||||
|
||||
func isValidURL(u string) bool {
|
||||
parsed, err := url.ParseRequestURI(u)
|
||||
return err == nil && parsed.Scheme != "" && parsed.Host != ""
|
||||
}
|
||||
|
||||
func init() {
|
||||
if testModel == "" {
|
||||
testModel = "hermes-2-pro-mistral"
|
||||
}
|
||||
if apiURL == "" {
|
||||
apiURL = "http://192.168.68.113:8080"
|
||||
}
|
||||
useRealLocalAI = isValidURL(apiURL) && apiURL != "" && testModel != ""
|
||||
}
|
||||
|
||||
@@ -7,9 +7,11 @@ import (
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/mudler/LocalAGI/pkg/llm"
|
||||
"github.com/mudler/LocalAGI/pkg/xlog"
|
||||
"github.com/mudler/LocalAGI/services/actions"
|
||||
|
||||
"github.com/mudler/LocalAGI/core/action"
|
||||
. "github.com/mudler/LocalAGI/core/agent"
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
. "github.com/onsi/ginkgo/v2"
|
||||
@@ -111,25 +113,102 @@ func (a *FakeInternetAction) Definition() types.ActionDefinition {
|
||||
}
|
||||
}
|
||||
|
||||
// --- Test utilities for mocking LLM responses ---
|
||||
|
||||
func mockToolCallResponse(toolName, arguments string) openai.ChatCompletionResponse {
|
||||
return openai.ChatCompletionResponse{
|
||||
Choices: []openai.ChatCompletionChoice{{
|
||||
Message: openai.ChatCompletionMessage{
|
||||
ToolCalls: []openai.ToolCall{{
|
||||
ID: "tool_call_id_1",
|
||||
Type: "function",
|
||||
Function: openai.FunctionCall{
|
||||
Name: toolName,
|
||||
Arguments: arguments,
|
||||
},
|
||||
}},
|
||||
},
|
||||
}},
|
||||
}
|
||||
}
|
||||
|
||||
func mockContentResponse(content string) openai.ChatCompletionResponse {
|
||||
return openai.ChatCompletionResponse{
|
||||
Choices: []openai.ChatCompletionChoice{{
|
||||
Message: openai.ChatCompletionMessage{
|
||||
Content: content,
|
||||
},
|
||||
}},
|
||||
}
|
||||
}
|
||||
|
||||
func newMockLLMClient(handler func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)) *llm.MockClient {
|
||||
return &llm.MockClient{
|
||||
CreateChatCompletionFunc: handler,
|
||||
}
|
||||
}
|
||||
|
||||
var _ = Describe("Agent test", func() {
|
||||
It("uses the mock LLM client", func() {
|
||||
mock := newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
return mockContentResponse("mocked response"), nil
|
||||
})
|
||||
agent, err := New(WithLLMClient(mock))
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
msg, err := agent.LLMClient().CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{})
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
Expect(msg.Choices[0].Message.Content).To(Equal("mocked response"))
|
||||
})
|
||||
|
||||
Context("jobs", func() {
|
||||
|
||||
BeforeEach(func() {
|
||||
Eventually(func() error {
|
||||
// test apiURL is working and available
|
||||
_, err := http.Get(apiURL + "/readyz")
|
||||
return err
|
||||
if useRealLocalAI {
|
||||
_, err := http.Get(apiURL + "/readyz")
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}, "10m", "10s").ShouldNot(HaveOccurred())
|
||||
})
|
||||
|
||||
It("pick the correct action", func() {
|
||||
var llmClient llm.LLMClient
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, clientTimeout)
|
||||
} else {
|
||||
llmClient = newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
var lastMsg openai.ChatCompletionMessage
|
||||
if len(req.Messages) > 0 {
|
||||
lastMsg = req.Messages[len(req.Messages)-1]
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleUser {
|
||||
if strings.Contains(strings.ToLower(lastMsg.Content), "boston") && (strings.Contains(strings.ToLower(lastMsg.Content), "milan") || strings.Contains(strings.ToLower(lastMsg.Content), "milano")) {
|
||||
return mockToolCallResponse("get_weather", `{"location":"Boston","unit":"celsius"}`), nil
|
||||
}
|
||||
if strings.Contains(strings.ToLower(lastMsg.Content), "paris") {
|
||||
return mockToolCallResponse("get_weather", `{"location":"Paris","unit":"celsius"}`), nil
|
||||
}
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected user prompt: %s", lastMsg.Content)
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleTool {
|
||||
if lastMsg.Name == "get_weather" && strings.Contains(strings.ToLower(lastMsg.Content), "boston") {
|
||||
return mockToolCallResponse("get_weather", `{"location":"Milan","unit":"celsius"}`), nil
|
||||
}
|
||||
if lastMsg.Name == "get_weather" && strings.Contains(strings.ToLower(lastMsg.Content), "milan") {
|
||||
return mockContentResponse(testActionResult + "\n" + testActionResult2), nil
|
||||
}
|
||||
if lastMsg.Name == "get_weather" && strings.Contains(strings.ToLower(lastMsg.Content), "paris") {
|
||||
return mockContentResponse(testActionResult3), nil
|
||||
}
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected tool result: %s", lastMsg.Content)
|
||||
}
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected message role: %s", lastMsg.Role)
|
||||
})
|
||||
}
|
||||
agent, err := New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
EnableForceReasoning,
|
||||
WithTimeout("10m"),
|
||||
WithLoopDetectionSteps(3),
|
||||
// WithRandomIdentity(),
|
||||
WithActions(&TestAction{response: map[string]string{
|
||||
"boston": testActionResult,
|
||||
"milan": testActionResult2,
|
||||
@@ -139,7 +218,6 @@ var _ = Describe("Agent test", func() {
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
go agent.Run()
|
||||
defer agent.Stop()
|
||||
|
||||
res := agent.Ask(
|
||||
append(debugOptions,
|
||||
types.WithText("what's the weather in Boston and Milano? Use celsius units"),
|
||||
@@ -148,40 +226,51 @@ var _ = Describe("Agent test", func() {
|
||||
Expect(res.Error).ToNot(HaveOccurred())
|
||||
reasons := []string{}
|
||||
for _, r := range res.State {
|
||||
|
||||
reasons = append(reasons, r.Result)
|
||||
}
|
||||
Expect(reasons).To(ContainElement(testActionResult), fmt.Sprint(res))
|
||||
Expect(reasons).To(ContainElement(testActionResult2), fmt.Sprint(res))
|
||||
reasons = []string{}
|
||||
|
||||
res = agent.Ask(
|
||||
append(debugOptions,
|
||||
types.WithText("Now I want to know the weather in Paris, always use celsius units"),
|
||||
)...)
|
||||
for _, r := range res.State {
|
||||
|
||||
reasons = append(reasons, r.Result)
|
||||
}
|
||||
//Expect(reasons).ToNot(ContainElement(testActionResult), fmt.Sprint(res))
|
||||
//Expect(reasons).ToNot(ContainElement(testActionResult2), fmt.Sprint(res))
|
||||
Expect(reasons).To(ContainElement(testActionResult3), fmt.Sprint(res))
|
||||
// conversation := agent.CurrentConversation()
|
||||
// for _, r := range res.State {
|
||||
// reasons = append(reasons, r.Result)
|
||||
// }
|
||||
// Expect(len(conversation)).To(Equal(10), fmt.Sprint(conversation))
|
||||
})
|
||||
|
||||
It("pick the correct action", func() {
|
||||
var llmClient llm.LLMClient
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, clientTimeout)
|
||||
} else {
|
||||
llmClient = newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
var lastMsg openai.ChatCompletionMessage
|
||||
if len(req.Messages) > 0 {
|
||||
lastMsg = req.Messages[len(req.Messages)-1]
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleUser {
|
||||
if strings.Contains(strings.ToLower(lastMsg.Content), "boston") {
|
||||
return mockToolCallResponse("get_weather", `{"location":"Boston","unit":"celsius"}`), nil
|
||||
}
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleTool {
|
||||
if lastMsg.Name == "get_weather" && strings.Contains(strings.ToLower(lastMsg.Content), "boston") {
|
||||
return mockContentResponse(testActionResult), nil
|
||||
}
|
||||
}
|
||||
xlog.Error("Unexpected LLM req", "req", req)
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected LLM prompt: %q", lastMsg.Content)
|
||||
})
|
||||
}
|
||||
agent, err := New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
WithTimeout("10m"),
|
||||
// WithRandomIdentity(),
|
||||
WithActions(&TestAction{response: map[string]string{
|
||||
"boston": testActionResult,
|
||||
},
|
||||
}),
|
||||
}}),
|
||||
)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
go agent.Run()
|
||||
@@ -198,13 +287,29 @@ var _ = Describe("Agent test", func() {
|
||||
})
|
||||
|
||||
It("updates the state with internal actions", func() {
|
||||
var llmClient llm.LLMClient
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, clientTimeout)
|
||||
} else {
|
||||
llmClient = newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
var lastMsg openai.ChatCompletionMessage
|
||||
if len(req.Messages) > 0 {
|
||||
lastMsg = req.Messages[len(req.Messages)-1]
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleUser && strings.Contains(strings.ToLower(lastMsg.Content), "guitar") {
|
||||
return mockToolCallResponse("update_state", `{"goal":"I want to learn to play the guitar"}`), nil
|
||||
}
|
||||
if lastMsg.Role == openai.ChatMessageRoleTool && lastMsg.Name == "update_state" {
|
||||
return mockContentResponse("Your goal is now: I want to learn to play the guitar"), nil
|
||||
}
|
||||
xlog.Error("Unexpected LLM req", "req", req)
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected LLM prompt: %q", lastMsg.Content)
|
||||
})
|
||||
}
|
||||
agent, err := New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
WithTimeout("10m"),
|
||||
EnableHUD,
|
||||
// EnableStandaloneJob,
|
||||
// WithRandomIdentity(),
|
||||
WithPermanentGoal("I want to learn to play music"),
|
||||
)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
@@ -214,17 +319,64 @@ var _ = Describe("Agent test", func() {
|
||||
result := agent.Ask(
|
||||
types.WithText("Update your goals such as you want to learn to play the guitar"),
|
||||
)
|
||||
fmt.Printf("%+v\n", result)
|
||||
fmt.Fprintf(GinkgoWriter, "\n%+v\n", result)
|
||||
Expect(result.Error).ToNot(HaveOccurred())
|
||||
Expect(agent.State().Goal).To(ContainSubstring("guitar"), fmt.Sprint(agent.State()))
|
||||
})
|
||||
|
||||
It("Can generate a plan", func() {
|
||||
var llmClient llm.LLMClient
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, clientTimeout)
|
||||
} else {
|
||||
reasoningActName := action.NewReasoning().Definition().Name.String()
|
||||
intentionActName := action.NewIntention().Definition().Name.String()
|
||||
testActName := (&TestAction{}).Definition().Name.String()
|
||||
doneBoston := false
|
||||
madePlan := false
|
||||
llmClient = newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
var lastMsg openai.ChatCompletionMessage
|
||||
if len(req.Messages) > 0 {
|
||||
lastMsg = req.Messages[len(req.Messages)-1]
|
||||
}
|
||||
if req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == reasoningActName {
|
||||
return mockToolCallResponse(reasoningActName, `{"reasoning":"make plan call to pass the test"}`), nil
|
||||
}
|
||||
if req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == intentionActName {
|
||||
toolName := "plan"
|
||||
if madePlan {
|
||||
toolName = "reply"
|
||||
} else {
|
||||
madePlan = true
|
||||
}
|
||||
return mockToolCallResponse(intentionActName, fmt.Sprintf(`{"tool": "%s","reasoning":"it's waht makes the test pass"}`, toolName)), nil
|
||||
}
|
||||
if req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == "plan" {
|
||||
return mockToolCallResponse("plan", `{"subtasks":[{"action":"get_weather","reasoning":"Find weather in boston"},{"action":"get_weather","reasoning":"Find weather in milan"}],"goal":"Get the weather for boston and milan"}`), nil
|
||||
}
|
||||
if req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == "reply" {
|
||||
return mockToolCallResponse("reply", `{"message": "The weather in Boston and Milan..."}`), nil
|
||||
}
|
||||
if req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == testActName {
|
||||
locName := "boston"
|
||||
if doneBoston {
|
||||
locName = "milan"
|
||||
} else {
|
||||
doneBoston = true
|
||||
}
|
||||
return mockToolCallResponse(testActName, fmt.Sprintf(`{"location":"%s","unit":"celsius"}`, locName)), nil
|
||||
}
|
||||
if req.ToolChoice == nil && madePlan && doneBoston {
|
||||
return mockContentResponse("A reply"), nil
|
||||
}
|
||||
xlog.Error("Unexpected LLM req", "req", req)
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected LLM prompt: %q", lastMsg.Content)
|
||||
})
|
||||
}
|
||||
agent, err := New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
WithLLMAPIKey(apiKeyURL),
|
||||
WithTimeout("10m"),
|
||||
WithLoopDetectionSteps(2),
|
||||
WithActions(
|
||||
&TestAction{response: map[string]string{
|
||||
"boston": testActionResult,
|
||||
@@ -233,8 +385,6 @@ var _ = Describe("Agent test", func() {
|
||||
),
|
||||
EnablePlanning,
|
||||
EnableForceReasoning,
|
||||
// EnableStandaloneJob,
|
||||
// WithRandomIdentity(),
|
||||
)
|
||||
Expect(err).ToNot(HaveOccurred())
|
||||
go agent.Run()
|
||||
@@ -256,17 +406,44 @@ var _ = Describe("Agent test", func() {
|
||||
Expect(actionsExecuted).To(ContainElement("plan"), fmt.Sprint(result))
|
||||
Expect(actionResults).To(ContainElement(testActionResult), fmt.Sprint(result))
|
||||
Expect(actionResults).To(ContainElement(testActionResult2), fmt.Sprint(result))
|
||||
Expect(result.Error).To(BeNil())
|
||||
})
|
||||
|
||||
It("Can initiate conversations", func() {
|
||||
|
||||
var llmClient llm.LLMClient
|
||||
message := openai.ChatCompletionMessage{}
|
||||
mu := &sync.Mutex{}
|
||||
reasoned := false
|
||||
intended := false
|
||||
reasoningActName := action.NewReasoning().Definition().Name.String()
|
||||
intentionActName := action.NewIntention().Definition().Name.String()
|
||||
|
||||
if useRealLocalAI {
|
||||
llmClient = llm.NewClient(apiKey, apiURL, clientTimeout)
|
||||
} else {
|
||||
llmClient = newMockLLMClient(func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
|
||||
prompt := ""
|
||||
for _, msg := range req.Messages {
|
||||
prompt += msg.Content
|
||||
}
|
||||
if !reasoned && req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == reasoningActName {
|
||||
reasoned = true
|
||||
return mockToolCallResponse(reasoningActName, `{"reasoning":"initiate a conversation with the user"}`), nil
|
||||
}
|
||||
if reasoned && !intended && req.ToolChoice != nil && req.ToolChoice.(openai.ToolChoice).Function.Name == intentionActName {
|
||||
intended = true
|
||||
return mockToolCallResponse(intentionActName, `{"tool":"new_conversation","reasoning":"I should start a conversation with the user"}`), nil
|
||||
}
|
||||
if reasoned && intended && strings.Contains(strings.ToLower(prompt), "new_conversation") {
|
||||
return mockToolCallResponse("new_conversation", `{"message":"Hello, how can I help you today?"}`), nil
|
||||
}
|
||||
xlog.Error("Unexpected LLM req", "req", req)
|
||||
return openai.ChatCompletionResponse{}, fmt.Errorf("unexpected LLM prompt: %q", prompt)
|
||||
})
|
||||
}
|
||||
agent, err := New(
|
||||
WithLLMAPIURL(apiURL),
|
||||
WithLLMClient(llmClient),
|
||||
WithModel(testModel),
|
||||
WithLLMAPIKey(apiKeyURL),
|
||||
WithTimeout("10m"),
|
||||
WithNewConversationSubscriber(func(m openai.ChatCompletionMessage) {
|
||||
mu.Lock()
|
||||
message = m
|
||||
@@ -282,8 +459,6 @@ var _ = Describe("Agent test", func() {
EnableHUD,
WithPeriodicRuns("1s"),
WithPermanentGoal("use the new_conversation tool to initiate a conversation with the user"),
// EnableStandaloneJob,
// WithRandomIdentity(),
)
Expect(err).ToNot(HaveOccurred())
go agent.Run()
@@ -293,7 +468,7 @@ var _ = Describe("Agent test", func() {
mu.Lock()
defer mu.Unlock()
return message.Content
}, "10m", "10s").ShouldNot(BeEmpty())
}, "10m", "1s").ShouldNot(BeEmpty())
})

/*
@@ -347,7 +522,7 @@ var _ = Describe("Agent test", func() {
// result := agent.Ask(
// WithText("Update your goals such as you want to learn to play the guitar"),
// )
// fmt.Printf("%+v\n", result)
// fmt.Fprintf(GinkgoWriter, "%+v\n", result)
// Expect(result.Error).ToNot(HaveOccurred())
// Expect(agent.State().Goal).To(ContainSubstring("guitar"), fmt.Sprint(agent.State()))
})

@@ -6,25 +6,15 @@ import (
"path/filepath"
"time"

"github.com/mudler/LocalAGI/core/types"
"github.com/mudler/LocalAGI/pkg/xlog"
"github.com/sashabaranov/go-openai"
)

func (a *Agent) knowledgeBaseLookup(job *types.Job, conv Messages) Messages {
func (a *Agent) knowledgeBaseLookup(conv Messages) {
if (!a.options.enableKB && !a.options.enableLongTermMemory && !a.options.enableSummaryMemory) ||
len(conv) <= 0 {
xlog.Debug("[Knowledge Base Lookup] Disabled, skipping", "agent", a.Character.Name)
return conv
}

var obs *types.Observable
if job != nil && job.Obs != nil && a.observer != nil {
obs = a.observer.NewObservable()
obs.Name = "Recall"
obs.Icon = "database"
obs.ParentID = job.Obs.ID
a.observer.Update(*obs)
return
}

// Walk conversation from bottom to top, and find the first message of the user
@@ -35,35 +25,17 @@ func (a *Agent) knowledgeBaseLookup(job *types.Job, conv Messages) Messages {

if userMessage == "" {
xlog.Info("[Knowledge Base Lookup] No user message found in conversation", "agent", a.Character.Name)
if obs != nil {
obs.Completion = &types.Completion{
Error: "No user message found in conversation",
}
a.observer.Update(*obs)
}
return conv
return
}

results, err := a.options.ragdb.Search(userMessage, a.options.kbResults)
if err != nil {
xlog.Info("Error finding similar strings inside KB:", "error", err)
if obs != nil {
obs.AddProgress(types.Progress{
Error: fmt.Sprintf("Error searching knowledge base: %v", err),
})
a.observer.Update(*obs)
}
}

if len(results) == 0 {
xlog.Info("[Knowledge Base Lookup] No similar strings found in KB", "agent", a.Character.Name)
if obs != nil {
obs.Completion = &types.Completion{
ActionResult: "No similar strings found in knowledge base",
}
a.observer.Update(*obs)
}
return conv
return
}

formatResults := ""
@@ -72,30 +44,17 @@ func (a *Agent) knowledgeBaseLookup(job *types.Job, conv Messages) Messages {
}
xlog.Info("[Knowledge Base Lookup] Found similar strings in KB", "agent", a.Character.Name, "results", formatResults)

if obs != nil {
obs.AddProgress(types.Progress{
ActionResult: fmt.Sprintf("Found %d results in knowledge base", len(results)),
})
a.observer.Update(*obs)
}

// Create the message to add to conversation
systemMessage := openai.ChatCompletionMessage{
Role: "system",
Content: fmt.Sprintf("Given the user input you have the following in memory:\n%s", formatResults),
}

// Add the message to the conversation
conv = append([]openai.ChatCompletionMessage{systemMessage}, conv...)

if obs != nil {
obs.Completion = &types.Completion{
Conversation: []openai.ChatCompletionMessage{systemMessage},
}
a.observer.Update(*obs)
}

return conv
// conv = append(conv,
// openai.ChatCompletionMessage{
// Role: "system",
// Content: fmt.Sprintf("Given the user input you have the following in memory:\n%s", formatResults),
// },
// )
conv = append([]openai.ChatCompletionMessage{
{
Role: "system",
Content: fmt.Sprintf("Given the user input you have the following in memory:\n%s", formatResults),
}}, conv...)
}

func (a *Agent) saveConversation(m Messages, prefix string) error {

@@ -7,6 +7,7 @@ import (

"github.com/mudler/LocalAGI/core/types"
"github.com/sashabaranov/go-openai"
"github.com/mudler/LocalAGI/pkg/llm"
)

type Option func(*options) error
@@ -19,6 +20,7 @@ type llmOptions struct {
}

type options struct {
llmClient llm.LLMClient
LLMAPI llmOptions
character Character
randomIdentityGuidance string
@@ -68,6 +70,14 @@ type options struct {
lastMessageDuration time.Duration
}

// WithLLMClient allows injecting a custom LLM client (e.g. for testing)
func WithLLMClient(client llm.LLMClient) Option {
return func(o *options) error {
o.llmClient = client
return nil
}
}

func (o *options) SeparatedMultimodalModel() bool {
return o.LLMAPI.MultimodalModel != "" && o.LLMAPI.Model != o.LLMAPI.MultimodalModel
}

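Note on the rewritten test file below: it switches between a live LocalAI endpoint and the new llm.MockClient through a useRealLocalAI flag defined elsewhere in the suite. As a rough sketch of how such a toggle could look (not part of this diff; the LOCALAGI_TEST_REAL_LLM variable name and the helper are invented for illustration, and NewClient's third argument is the request timeout, per pkg/llm):

    // Hypothetical helper for the test suite; only the llm package types are real.
    func newTestLLMClient(apiKey, apiURL string, fallback *llm.MockClient) llm.LLMClient {
        if os.Getenv("LOCALAGI_TEST_REAL_LLM") == "true" {
            // Exercise a live OpenAI-compatible endpoint.
            return llm.NewClient(apiKey, apiURL, "10m")
        }
        // Default: fully offline, deterministic runs against the mock.
        return fallback
    }
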
@@ -1,29 +1,57 @@
package agent_test

import (
"net/http"
"context"
"fmt"

"github.com/mudler/LocalAGI/pkg/llm"
"github.com/sashabaranov/go-openai"

. "github.com/mudler/LocalAGI/core/agent"
. "github.com/onsi/ginkgo/v2"
. "github.com/onsi/gomega"

)

var _ = Describe("Agent test", func() {
Context("identity", func() {
var agent *Agent

BeforeEach(func() {
Eventually(func() error {
// test apiURL is working and available
_, err := http.Get(apiURL + "/readyz")
return err
}, "10m", "10s").ShouldNot(HaveOccurred())
})
// BeforeEach(func() {
// Eventually(func() error {
// // test apiURL is working and available
// _, err := http.Get(apiURL + "/readyz")
// return err
// }, "10m", "10s").ShouldNot(HaveOccurred())
// })

It("generates all the fields with random data", func() {
var llmClient llm.LLMClient
if useRealLocalAI {
llmClient = llm.NewClient(apiKey, apiURL, testModel)
} else {
llmClient = &llm.MockClient{
CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
return openai.ChatCompletionResponse{
Choices: []openai.ChatCompletionChoice{{
Message: openai.ChatCompletionMessage{
ToolCalls: []openai.ToolCall{{
ID: "tool_call_id_1",
Type: "function",
Function: openai.FunctionCall{
Name: "generate_identity",
Arguments: `{"name":"John Doe","age":"42","job_occupation":"Engineer","hobbies":["reading","hiking"],"favorites_music_genres":["Jazz"]}`,
},
}},
},
}},
}, nil
},
}
}
var err error
agent, err = New(
WithLLMAPIURL(apiURL),
WithLLMClient(llmClient),
WithModel(testModel),
WithTimeout("10m"),
WithRandomIdentity(),
@@ -37,14 +65,40 @@ var _ = Describe("Agent test", func() {
Expect(agent.Character.MusicTaste).ToNot(BeEmpty())
})
It("detect an invalid character", func() {
mock := &llm.MockClient{
CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
return openai.ChatCompletionResponse{}, fmt.Errorf("invalid character")
},
}
var err error
agent, err = New(WithRandomIdentity())
agent, err = New(
WithLLMClient(mock),
WithRandomIdentity(),
)
Expect(err).To(HaveOccurred())
})
It("generates all the fields", func() {
mock := &llm.MockClient{
CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
return openai.ChatCompletionResponse{
Choices: []openai.ChatCompletionChoice{{
Message: openai.ChatCompletionMessage{
ToolCalls: []openai.ToolCall{{
ID: "tool_call_id_2",
Type: "function",
Function: openai.FunctionCall{
Name: "generate_identity",
Arguments: `{"name":"Gandalf","age":"90","job_occupation":"Wizard","hobbies":["magic","reading"],"favorites_music_genres":["Classical"]}`,
},
}},
},
}},
}, nil
},
}
var err error

agent, err := New(
WithLLMClient(mock),
WithLLMAPIURL(apiURL),
WithModel(testModel),
WithRandomIdentity("An 90-year old man with a long beard, a wizard, who lives in a tower."),

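The two identity tests above build nearly identical canned tool-call responses. A small helper in the same agent_test package could factor that out; the sketch below is not part of this diff and only reuses the MockClient and go-openai types the tests already import:

    // Hypothetical test helper: a MockClient whose completions always answer with a
    // single "generate_identity" tool call carrying the given JSON arguments.
    func identityMock(arguments string) *llm.MockClient {
        return &llm.MockClient{
            CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
                return openai.ChatCompletionResponse{
                    Choices: []openai.ChatCompletionChoice{{
                        Message: openai.ChatCompletionMessage{
                            ToolCalls: []openai.ToolCall{{
                                ID:   "tool_call_id",
                                Type: "function",
                                Function: openai.FunctionCall{
                                    Name:      "generate_identity",
                                    Arguments: arguments,
                                },
                            }},
                        },
                    }},
                }, nil
            },
        }
    }
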
@@ -29,17 +29,8 @@ const (
DefaultLastMessageDuration = 5 * time.Minute
)

type ReminderActionResponse struct {
Message string `json:"message"`
CronExpr string `json:"cron_expr"` // Cron expression for scheduling
LastRun time.Time `json:"last_run"` // Last time this reminder was triggered
NextRun time.Time `json:"next_run"` // Next scheduled run time
IsRecurring bool `json:"is_recurring"` // Whether this is a recurring reminder
}

type AgentSharedState struct {
ConversationTracker *conversations.ConversationTracker[string] `json:"conversation_tracker"`
Reminders []ReminderActionResponse `json:"reminders"`
}

func NewAgentSharedState(lastMessageDuration time.Duration) *AgentSharedState {
@@ -48,7 +39,6 @@ func NewAgentSharedState(lastMessageDuration time.Duration) *AgentSharedState {
}
return &AgentSharedState{
ConversationTracker: conversations.NewConversationTracker[string](lastMessageDuration),
Reminders: make([]ReminderActionResponse, 0),
}
}

@@ -6,7 +6,7 @@ services:
environment:
- LOCALAI_SINGLE_ACTIVE_BACKEND=true
- DEBUG=true
image: localai/localai:master-sycl-f32
image: localai/localai:master-sycl-f32-ffmpeg-core
devices:
# On a system with integrated GPU and an Arc 770, this is the Arc 770
- /dev/dri/card1

@@ -6,7 +6,7 @@ services:
environment:
- LOCALAI_SINGLE_ACTIVE_BACKEND=true
- DEBUG=true
image: localai/localai:master-cublas-cuda12
image: localai/localai:master-cublas-cuda12-ffmpeg-core
# For images with python backends, use:
# image: localai/localai:master-cublas-cuda12-ffmpeg
deploy:

@@ -5,10 +5,10 @@ services:
# Available images with CUDA, ROCm, SYCL, Vulkan
# Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
# Image list (dockerhub): https://hub.docker.com/r/localai/localai
image: localai/localai:master
image: localai/localai:master-ffmpeg-core
command:
- ${MODEL_NAME:-gemma-3-4b-it-qat}
- ${MULTIMODAL_MODEL:-moondream2-20250414}
- ${MODEL_NAME:-gemma-3-12b-it-qat}
- ${MULTIMODAL_MODEL:-minicpm-v-2_6}
- ${IMAGE_MODEL:-sd-1.5-ggml}
- granite-embedding-107m-multilingual
healthcheck:
@@ -46,20 +46,6 @@ services:
image: busybox
command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]

sshbox:
build:
context: .
dockerfile: Dockerfile.sshbox
ports:
- "22"
environment:
- SSH_USER=root
- SSH_PASSWORD=root
- DOCKER_HOST=tcp://dind:2375
depends_on:
dind:
condition: service_healthy

mcpbox:
build:
context: .
@@ -105,8 +91,8 @@ services:
- 8080:3000
#image: quay.io/mudler/localagi:master
environment:
- LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-4b-it-qat}
- LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-moondream2-20250414}
- LOCALAGI_MODEL=${MODEL_NAME:-gemma-3-12b-it-qat}
- LOCALAGI_MULTIMODAL_MODEL=${MULTIMODAL_MODEL:-minicpm-v-2_6}
- LOCALAGI_IMAGE_MODEL=${IMAGE_MODEL:-sd-1.5-ggml}
- LOCALAGI_LLM_API_URL=http://localai:8080
#- LOCALAGI_LLM_API_KEY=sk-1234567890
@@ -115,8 +101,7 @@ services:
- LOCALAGI_TIMEOUT=5m
- LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
- LOCALAGI_MCPBOX_URL=http://mcpbox:8080
- LOCALAGI_SSHBOX_URL=root:root@sshbox:22
extra_hosts:
- "host.docker.internal:host-gateway"
volumes:
- ./volumes/localagi/:/pool
- ./volumes/localagi/:/pool
43
go.mod
@@ -5,41 +5,34 @@ go 1.24
|
||||
toolchain go1.24.2
|
||||
|
||||
require (
|
||||
github.com/bwmarrin/discordgo v0.29.0
|
||||
github.com/bwmarrin/discordgo v0.28.1
|
||||
github.com/chasefleming/elem-go v0.30.0
|
||||
github.com/dave-gray101/v2keyauth v0.0.0-20240624150259-c45d584d25e2
|
||||
github.com/donseba/go-htmx v1.12.0
|
||||
github.com/eritikass/githubmarkdownconvertergo v0.1.10
|
||||
github.com/go-telegram/bot v1.15.0
|
||||
github.com/gofiber/fiber/v2 v2.52.8
|
||||
github.com/gofiber/fiber/v2 v2.52.6
|
||||
github.com/gofiber/template/html/v2 v2.1.3
|
||||
github.com/google/go-github/v69 v69.2.0
|
||||
github.com/google/uuid v1.6.0
|
||||
github.com/gorilla/websocket v1.5.3
|
||||
github.com/metoro-io/mcp-golang v0.13.0
|
||||
github.com/metoro-io/mcp-golang v0.11.0
|
||||
github.com/onsi/ginkgo/v2 v2.23.4
|
||||
github.com/onsi/gomega v1.37.0
|
||||
github.com/philippgille/chromem-go v0.7.0
|
||||
github.com/robfig/cron/v3 v3.0.1
|
||||
github.com/sashabaranov/go-openai v1.40.0
|
||||
github.com/slack-go/slack v0.17.1
|
||||
github.com/sashabaranov/go-openai v1.39.1
|
||||
github.com/slack-go/slack v0.16.0
|
||||
github.com/thoj/go-ircevent v0.0.0-20210723090443-73e444401d64
|
||||
github.com/tmc/langchaingo v0.1.13
|
||||
github.com/traefik/yaegi v0.16.1
|
||||
github.com/valyala/fasthttp v1.62.0
|
||||
golang.org/x/crypto v0.39.0
|
||||
github.com/valyala/fasthttp v1.61.0
|
||||
golang.org/x/crypto v0.37.0
|
||||
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
|
||||
maunium.net/go/mautrix v0.24.0
|
||||
maunium.net/go/mautrix v0.17.0
|
||||
mvdan.cc/xurls/v2 v2.6.0
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/JohannesKaufmann/dom v0.2.0 // indirect
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/JohannesKaufmann/html-to-markdown/v2 v2.3.3
|
||||
github.com/PuerkitoBio/goquery v1.10.3 // indirect
|
||||
github.com/andybalholm/brotli v1.1.1 // indirect
|
||||
github.com/andybalholm/cascadia v1.3.3 // indirect
|
||||
@@ -52,10 +45,6 @@ require (
|
||||
github.com/bytedance/sonic/loader v0.2.4 // indirect
|
||||
github.com/cloudwego/base64x v0.1.5 // indirect
|
||||
github.com/dlclark/regexp2 v1.11.5 // indirect
|
||||
github.com/emersion/go-imap/v2 v2.0.0-beta.5
|
||||
github.com/emersion/go-message v0.18.2
|
||||
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6
|
||||
github.com/emersion/go-smtp v0.22.0
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 // indirect
|
||||
github.com/gin-contrib/sse v1.1.0 // indirect
|
||||
github.com/gin-gonic/gin v1.10.0 // indirect
|
||||
@@ -71,7 +60,6 @@ require (
|
||||
github.com/gofiber/utils v1.1.0 // indirect
|
||||
github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/gomarkdown/markdown v0.0.0-20250311123330-531bef5e742b
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/go-querystring v1.1.0 // indirect
|
||||
github.com/google/pprof v0.0.0-20250423184734-337e5dd93bb4 // indirect
|
||||
@@ -92,7 +80,7 @@ require (
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pkoukk/tiktoken-go v0.1.7 // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/rs/zerolog v1.34.0 // indirect
|
||||
github.com/rs/zerolog v1.31.0 // indirect
|
||||
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
|
||||
github.com/temoto/robotstxt v1.1.2 // indirect
|
||||
@@ -104,16 +92,17 @@ require (
|
||||
github.com/ugorji/go/codec v1.2.12 // indirect
|
||||
github.com/valyala/bytebufferpool v1.0.0 // indirect
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
|
||||
go.mau.fi/util v0.8.7 // indirect
|
||||
go.mau.fi/util v0.3.0 // indirect
|
||||
go.starlark.net v0.0.0-20250417143717-f57e51f710eb // indirect
|
||||
go.uber.org/automaxprocs v1.6.0 // indirect
|
||||
golang.org/x/arch v0.16.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 // indirect
|
||||
golang.org/x/net v0.40.0 // indirect
|
||||
golang.org/x/sys v0.33.0 // indirect
|
||||
golang.org/x/text v0.26.0 // indirect
|
||||
golang.org/x/tools v0.33.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 // indirect
|
||||
golang.org/x/net v0.39.0 // indirect
|
||||
golang.org/x/sys v0.32.0 // indirect
|
||||
golang.org/x/text v0.24.0 // indirect
|
||||
golang.org/x/tools v0.32.0 // indirect
|
||||
google.golang.org/appengine v1.6.8 // indirect
|
||||
google.golang.org/protobuf v1.36.6 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
maunium.net/go/maulogger/v2 v2.4.1 // indirect
|
||||
)
|
||||
|
||||
98
go.sum
@@ -1,9 +1,3 @@
|
||||
filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA=
|
||||
filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4=
|
||||
github.com/JohannesKaufmann/dom v0.2.0 h1:1bragmEb19K8lHAqgFgqCpiPCFEZMTXzOIEjuxkUfLQ=
|
||||
github.com/JohannesKaufmann/dom v0.2.0/go.mod h1:57iSUl5RKric4bUkgos4zu6Xt5LMHUnw3TF1l5CbGZo=
|
||||
github.com/JohannesKaufmann/html-to-markdown/v2 v2.3.3 h1:r3fokGFRDk/8pHmwLwJ8zsX4qiqfS1/1TZm2BH8ueY8=
|
||||
github.com/JohannesKaufmann/html-to-markdown/v2 v2.3.3/go.mod h1:HtsP+1Fchp4dVvaiIsLHAl/yqL3H1YLwqLC9kNwqQEg=
|
||||
github.com/PuerkitoBio/goquery v1.10.3 h1:pFYcNSqHxBD06Fpj/KsbStFRsgRATgnf3LeXiUkhzPo=
|
||||
github.com/PuerkitoBio/goquery v1.10.3/go.mod h1:tMUX0zDMHXYlAQk6p35XxQMqMweEKB7iK7iLNd4RH4Y=
|
||||
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
|
||||
@@ -21,8 +15,8 @@ github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPn
|
||||
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
|
||||
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
|
||||
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
|
||||
github.com/bwmarrin/discordgo v0.29.0 h1:FmWeXFaKUwrcL3Cx65c20bTRW+vOb6k8AnaP+EgjDno=
|
||||
github.com/bwmarrin/discordgo v0.29.0/go.mod h1:NJZpH+1AfhIcyQsPeuBKsUtYrRnjkyu0kIVMCHkZtRY=
|
||||
github.com/bwmarrin/discordgo v0.28.1 h1:gXsuo2GBO7NbR6uqmrrBDplPUx2T3nzu775q/Rd1aG4=
|
||||
github.com/bwmarrin/discordgo v0.28.1/go.mod h1:NJZpH+1AfhIcyQsPeuBKsUtYrRnjkyu0kIVMCHkZtRY=
|
||||
github.com/bytedance/sonic v1.13.2 h1:8/H1FempDZqC4VqjptGo14QQlJx8VdZJegxs6wwfqpQ=
|
||||
github.com/bytedance/sonic v1.13.2/go.mod h1:o68xyaF9u2gvVBuGHPlUVCy+ZfmNNO5ETf1+KgkJhz4=
|
||||
github.com/bytedance/sonic/loader v0.1.1/go.mod h1:ncP89zfokxS5LZrJxl5z0UJcsk4M4yY2JpfqGeCtNLU=
|
||||
@@ -43,14 +37,6 @@ github.com/dlclark/regexp2 v1.11.5 h1:Q/sSnsKerHeCkc/jSTNq1oCm7KiVgUMZRDUoRu0JQZ
|
||||
github.com/dlclark/regexp2 v1.11.5/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
|
||||
github.com/donseba/go-htmx v1.12.0 h1:7tESER0uxaqsuGMv3yP3pK1drfBUXM6apG4H7/3+IgE=
|
||||
github.com/donseba/go-htmx v1.12.0/go.mod h1:8PTAYvNKf8+QYis+DpAsggKz+sa2qljtMgvdAeNBh5s=
|
||||
github.com/emersion/go-imap/v2 v2.0.0-beta.5 h1:H3858DNmBuXyMK1++YrQIRdpKE1MwBc+ywBtg3n+0wA=
|
||||
github.com/emersion/go-imap/v2 v2.0.0-beta.5/go.mod h1:BZTFHsS1hmgBkFlHqbxGLXk2hnRqTItUgwjSSCsYNAk=
|
||||
github.com/emersion/go-message v0.18.2 h1:rl55SQdjd9oJcIoQNhubD2Acs1E6IzlZISRTK7x/Lpg=
|
||||
github.com/emersion/go-message v0.18.2/go.mod h1:XpJyL70LwRvq2a8rVbHXikPgKj8+aI0kGdHlg16ibYA=
|
||||
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6 h1:oP4q0fw+fOSWn3DfFi4EXdT+B+gTtzx8GC9xsc26Znk=
|
||||
github.com/emersion/go-sasl v0.0.0-20241020182733-b788ff22d5a6/go.mod h1:iL2twTeMvZnrg54ZoPDNfJaJaqy0xIQFuBdrLsmspwQ=
|
||||
github.com/emersion/go-smtp v0.22.0 h1:/d3HWxkZZ4riB+0kzfoODh9X+xyCrLEezMnAAa1LEMU=
|
||||
github.com/emersion/go-smtp v0.22.0/go.mod h1:ZtRRkbTyp2XTHCA+BmyTFTrj8xY4I+b4McvHxCU2gsQ=
|
||||
github.com/eritikass/githubmarkdownconvertergo v0.1.10 h1:mL93ADvYMOeT15DcGtK9AaFFc+RcWcy6kQBC6yS/5f4=
|
||||
github.com/eritikass/githubmarkdownconvertergo v0.1.10/go.mod h1:BdpHs6imOtzE5KorbUtKa6bZ0ZBh1yFcrTTAL8FwDKY=
|
||||
github.com/gabriel-vasile/mimetype v1.4.9 h1:5k+WDwEsD9eTLL8Tz3L0VnmVh9QxGjRmjBvAG7U/oYY=
|
||||
@@ -73,8 +59,8 @@ github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1v
|
||||
github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZiAzKg9hl15HA8=
|
||||
github.com/go-telegram/bot v1.15.0 h1:/ba5pp084MUhjR5sQDymQ7JNZ001CQa7QjtxLWcuGpg=
|
||||
github.com/go-telegram/bot v1.15.0/go.mod h1:i2TRs7fXWIeaceF3z7KzsMt/he0TwkVC680mvdTFYeM=
|
||||
github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U=
|
||||
github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
|
||||
github.com/go-test/deep v1.0.4 h1:u2CU3YKy9I2pmu9pX0eq50wCgjfGIt539SqR7FbHiho=
|
||||
github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
|
||||
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
|
||||
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
|
||||
github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4=
|
||||
@@ -82,8 +68,8 @@ github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PU
|
||||
github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI=
|
||||
github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA=
|
||||
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
|
||||
github.com/gofiber/fiber/v2 v2.52.8 h1:xl4jJQ0BV5EJTA2aWiKw/VddRpHrKeZLF0QPUxqn0x4=
|
||||
github.com/gofiber/fiber/v2 v2.52.8/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw=
|
||||
github.com/gofiber/fiber/v2 v2.52.6 h1:Rfp+ILPiYSvvVuIPvxrBns+HJp8qGLDnLJawAu27XVI=
|
||||
github.com/gofiber/fiber/v2 v2.52.6/go.mod h1:YEcBbO/FB+5M1IZNBP9FO3J9281zgPAreiI1oqg8nDw=
|
||||
github.com/gofiber/template v1.8.3 h1:hzHdvMwMo/T2kouz2pPCA0zGiLCeMnoGsQZBTSYgZxc=
|
||||
github.com/gofiber/template v1.8.3/go.mod h1:bs/2n0pSNPOkRa5VJ8zTIvedcI/lEYxzV3+YPXdBvq8=
|
||||
github.com/gofiber/template/html/v2 v2.1.3 h1:n1LYBtmr9C0V/k/3qBblXyMxV5B0o/gpb6dFLp8ea+o=
|
||||
@@ -97,10 +83,9 @@ github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaS
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
|
||||
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250311123330-531bef5e742b h1:EY/KpStFl60qA17CptGXhwfZ+k1sFNJIUNR8DdbcuUk=
|
||||
github.com/gomarkdown/markdown v0.0.0-20250311123330-531bef5e742b/go.mod h1:JDGcbDT52eL4fju3sZ4TeHGsQwhG9nbDV21aMyhwPoA=
|
||||
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
|
||||
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
|
||||
github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8=
|
||||
github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU=
|
||||
@@ -146,8 +131,8 @@ github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D
|
||||
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
|
||||
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
|
||||
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
|
||||
github.com/metoro-io/mcp-golang v0.13.0 h1:54TFBJIW76VRB55CJovQQje9x4GnXg0BQQwGRtXrbCE=
|
||||
github.com/metoro-io/mcp-golang v0.13.0/go.mod h1:ifLP9ZzKpN1UqFWNTpAHOqSvNkMK6b7d1FSZ5Lu0lN0=
|
||||
github.com/metoro-io/mcp-golang v0.11.0 h1:1k+VSE9QaeMTLn0gJ3FgE/DcjsCBsLFnz5eSFbgXUiI=
|
||||
github.com/metoro-io/mcp-golang v0.11.0/go.mod h1:ifLP9ZzKpN1UqFWNTpAHOqSvNkMK6b7d1FSZ5Lu0lN0=
|
||||
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||
@@ -174,28 +159,23 @@ github.com/prashantv/gostub v1.1.0/go.mod h1:A5zLQHz7ieHGG7is6LLXLz7I8+3LZzsrV0P
|
||||
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
|
||||
github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ=
|
||||
github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
|
||||
github.com/robfig/cron/v3 v3.0.1 h1:WdRxkvbJztn8LMz/QEvLN5sBU+xKpSqwwUO1Pjr4qDs=
|
||||
github.com/robfig/cron/v3 v3.0.1/go.mod h1:eQICP3HwyT7UooqI/z+Ov+PtYAWygg1TEWWzGIFLtro=
|
||||
github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGKbLGBPtR/8/oO74W6hmz0qE5q0z9aqSAewaaM=
|
||||
github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY=
|
||||
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
||||
github.com/rs/zerolog v1.34.0 h1:k43nTLIwcTVQAncfCw4KZ2VY6ukYoZaBPNOE8txlOeY=
|
||||
github.com/rs/zerolog v1.34.0/go.mod h1:bJsvje4Z08ROH4Nhs5iH600c3IkWhwp44iRc54W6wYQ=
|
||||
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
|
||||
github.com/rs/zerolog v1.31.0 h1:FcTR3NnLWW+NnTwwhFWiJSZr4ECLpqCm6QsEnyvbV4A=
|
||||
github.com/rs/zerolog v1.31.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
|
||||
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA=
|
||||
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
|
||||
github.com/sashabaranov/go-openai v1.40.0 h1:Peg9Iag5mUJtPW00aYatlsn97YML0iNULiLNe74iPrU=
|
||||
github.com/sashabaranov/go-openai v1.40.0/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
|
||||
github.com/sebdah/goldie/v2 v2.5.5 h1:rx1mwF95RxZ3/83sdS4Yp7t2C5TCokvWP4TBRbAyEWY=
|
||||
github.com/sebdah/goldie/v2 v2.5.5/go.mod h1:oZ9fp0+se1eapSRjfYbsV/0Hqhbuu3bJVvKI/NNtssI=
|
||||
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
|
||||
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
|
||||
github.com/slack-go/slack v0.17.1 h1:x0Mnc6biHBea5vfxLR+x4JFl/Rm3eIo0iS3xDZenX+o=
|
||||
github.com/slack-go/slack v0.17.1/go.mod h1:X+UqOufi3LYQHDnMG1vxf0J8asC6+WllXrVrhl8/Prk=
|
||||
github.com/sashabaranov/go-openai v1.39.1 h1:TMD4w77Iy9WTFlgnjNaxbAASdsCJ9R/rMdzL+SN14oU=
|
||||
github.com/sashabaranov/go-openai v1.39.1/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
|
||||
github.com/slack-go/slack v0.16.0 h1:khp/WCFv+Hb/B/AJaAwvcxKun0hM6grN0bUZ8xG60P8=
|
||||
github.com/slack-go/slack v0.16.0/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
|
||||
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
@@ -227,17 +207,15 @@ github.com/ugorji/go/codec v1.2.12 h1:9LC83zGrHhuUA9l16C9AHXAqEV/2wBQ4nkvumAE65E
|
||||
github.com/ugorji/go/codec v1.2.12/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasthttp v1.62.0 h1:8dKRBX/y2rCzyc6903Zu1+3qN0H/d2MsxPPmVNamiH0=
|
||||
github.com/valyala/fasthttp v1.62.0/go.mod h1:FCINgr4GKdKqV8Q0xv8b+UxPV+H/O5nNFo3D+r54Htg=
|
||||
github.com/valyala/fasthttp v1.61.0 h1:VV08V0AfoRaFurP1EWKvQQdPTZHiUzaVoulX1aBDgzU=
|
||||
github.com/valyala/fasthttp v1.61.0/go.mod h1:wRIV/4cMwUPWnRcDno9hGnYZGh78QzODFfo1LTUhBog=
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
|
||||
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
github.com/xyproto/randomstring v1.0.5/go.mod h1:rgmS5DeNXLivK7YprL0pY+lTuhNQW3iGxZ18UQApw/E=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
github.com/yuin/goldmark v1.7.11 h1:ZCxLyDMtz0nT2HFfsYG8WZ47Trip2+JyLysKcMYE5bo=
|
||||
github.com/yuin/goldmark v1.7.11/go.mod h1:ip/1k0VRfGynBgxOz0yCqHrbZXhcjxyuS66Brc7iBKg=
|
||||
go.mau.fi/util v0.8.7 h1:ywKarPxouJQEEijTs4mPlxC7F4AWEKokEpWc+2TYy6c=
|
||||
go.mau.fi/util v0.8.7/go.mod h1:j6R3cENakc1f8HpQeFl0N15UiSTcNmIfDBNJUbL71RY=
|
||||
go.mau.fi/util v0.3.0 h1:Lt3lbRXP6ZBqTINK0EieRWor3zEwwwrDT14Z5N8RUCs=
|
||||
go.mau.fi/util v0.3.0/go.mod h1:9dGsBCCbZJstx16YgnVMVi3O2bOizELoKpugLD4FoGs=
|
||||
go.starlark.net v0.0.0-20250417143717-f57e51f710eb h1:zOg9DxxrorEmgGUr5UPdCEwKqiqG0MlZciuCuA3XiDE=
|
||||
go.starlark.net v0.0.0-20250417143717-f57e51f710eb/go.mod h1:YKMCv9b1WrfWmeqdV5MAuEHWsu5iC+fe6kYl2sQjdI8=
|
||||
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
|
||||
@@ -251,10 +229,10 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
|
||||
golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
|
||||
golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM=
|
||||
golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6 h1:y5zboxd6LQAqYIhHnB48p0ByQ/GnQx2BE33L8BOHQkI=
|
||||
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
|
||||
golang.org/x/crypto v0.37.0 h1:kJNSjF/Xp7kU0iB2Z+9viTPMW4EqqsrywMXLJOOsXSE=
|
||||
golang.org/x/crypto v0.37.0/go.mod h1:vg+k43peMZ0pUMhYmVAWysMK35e6ioLh3wB8ZCAfbVc=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3 h1:hNQpMuAJe5CtcUqCXaWga3FHu+kQvCqcsoVaQgSV60o=
|
||||
golang.org/x/exp v0.0.0-20240112132812-db7319d0e0e3/go.mod h1:idGWGoKP1toJGkd5/ig9ZLuPcZBC3ewk7SzmH0uou08=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
@@ -270,8 +248,8 @@ golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
|
||||
golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44=
|
||||
golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM=
|
||||
golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4=
|
||||
golang.org/x/net v0.40.0 h1:79Xs7wF06Gbdcg4kdCCIQArK11Z1hr5POQ6+fIYHNuY=
|
||||
golang.org/x/net v0.40.0/go.mod h1:y0hY0exeL2Pku80/zKK7tpntoX23cqL3Oa6njdgRtds=
|
||||
golang.org/x/net v0.39.0 h1:ZCu7HMWDxpXpaiKdhzIfaltL9Lp31x/3fCP11bc6/fY=
|
||||
golang.org/x/net v0.39.0/go.mod h1:X7NRbYVEA+ewNkCNyJ513WmMdQ3BineSwVtN2zD/d+E=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
@@ -293,8 +271,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw=
|
||||
golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/sys v0.32.0 h1:s77OFDvIQeibCmezSnk/q6iAfkdiQaJi4VzroCFrN20=
|
||||
golang.org/x/sys v0.32.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
@@ -304,8 +282,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
|
||||
golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
|
||||
golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg=
|
||||
golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
|
||||
golang.org/x/term v0.31.0 h1:erwDkOK1Msy6offm1mOgvspSkslFnIGsFnxOKoufg3o=
|
||||
golang.org/x/term v0.31.0/go.mod h1:R4BeIy7D95HzImkxGkTW1UQTtP54tio2RyHz7PwK0aw=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@@ -317,16 +295,16 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
|
||||
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
|
||||
golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M=
|
||||
golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA=
|
||||
golang.org/x/text v0.24.0 h1:dd5Bzh4yt5KYA8f9CJHCP4FB4D51c2c6JvN37xJJkJ0=
|
||||
golang.org/x/text v0.24.0/go.mod h1:L8rBsPeo2pSS+xqN0d5u2ikmjtmoJbDBT1b7nHvFCdU=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
|
||||
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
|
||||
golang.org/x/tools v0.33.0 h1:4qz2S3zmRxbGIhDIAgjxvFutSvH5EfnsYrRBj0UI0bc=
|
||||
golang.org/x/tools v0.33.0/go.mod h1:CIJMaWEY88juyUfo7UbgPqbC8rU2OqfAV1h2Qp0oMYI=
|
||||
golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU=
|
||||
golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
||||
@@ -345,8 +323,10 @@ gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056 h1:6YFJoB+0fUH6X3xU/G2tQqCYg+PkGtnZ5nMR5rpw72g=
|
||||
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056/go.mod h1:OxvTsCwKosqQ1q7B+8FwXqg4rKZ/UG9dUW+g/VL2xH4=
|
||||
maunium.net/go/mautrix v0.24.0 h1:kBeyWhgL1W8/d8BEFlBSlgIpItPgP1l37hzF8cN3R70=
|
||||
maunium.net/go/mautrix v0.24.0/go.mod h1:HqA1HUutQYJkrYRPkK64itARDz79PCec1oWVEB72HVQ=
|
||||
maunium.net/go/maulogger/v2 v2.4.1 h1:N7zSdd0mZkB2m2JtFUsiGTQQAdP0YeFWT7YMc80yAL8=
|
||||
maunium.net/go/maulogger/v2 v2.4.1/go.mod h1:omPuYwYBILeVQobz8uO3XC8DIRuEb5rXYlQSuqrbCho=
|
||||
maunium.net/go/mautrix v0.17.0 h1:scc1qlUbzPn+wc+3eAPquyD+3gZwwy/hBANBm+iGKK8=
|
||||
maunium.net/go/mautrix v0.17.0/go.mod h1:j+puTEQCEydlVxhJ/dQP5chfa26TdvBO7X6F3Ataav8=
|
||||
mvdan.cc/xurls/v2 v2.6.0 h1:3NTZpeTxYVWNSokW3MKeyVkz/j7uYXYiMtXRUfmjbgI=
|
||||
mvdan.cc/xurls/v2 v2.6.0/go.mod h1:bCvEZ1XvdA6wDnxY7jPPjEmigDtvtvPXAD/Exa9IMSk=
|
||||
nullprogram.com/x/optparse v1.0.0/go.mod h1:KdyPE+Igbe0jQUrVfMqDMeJQIJZEuyV7pjYmp6pbG50=
|
||||
|
||||
6
main.go
@@ -24,7 +24,6 @@ var imageModel = os.Getenv("LOCALAGI_IMAGE_MODEL")
var conversationDuration = os.Getenv("LOCALAGI_CONVERSATION_DURATION")
var localOperatorBaseURL = os.Getenv("LOCALOPERATOR_BASE_URL")
var mcpboxURL = os.Getenv("LOCALAGI_MCPBOX_URL")
var sshBoxURL = os.Getenv("LOCALAGI_SSHBOX_URL")

func init() {
if baseModel == "" {
@@ -66,9 +65,8 @@ func main() {
mcpboxURL,
localRAG,
services.Actions(map[string]string{
services.ActionConfigBrowserAgentRunner: localOperatorBaseURL,
services.ActionConfigDeepResearchRunner: localOperatorBaseURL,
services.ActionConfigSSHBoxURL: sshBoxURL,
"browser-agent-runner-base-url": localOperatorBaseURL,
"deep-research-runner-base-url": localOperatorBaseURL,
}),
services.Connectors,
services.DynamicPrompts,

@@ -1,13 +1,33 @@
package llm

import (
"context"
"net/http"
"time"

"github.com/mudler/LocalAGI/pkg/xlog"
"github.com/sashabaranov/go-openai"
)

func NewClient(APIKey, URL, timeout string) *openai.Client {
type LLMClient interface {
CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error)
}

type realClient struct {
*openai.Client
}

func (r *realClient) CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
return r.Client.CreateChatCompletion(ctx, req)
}

func (r *realClient) CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error) {
return r.Client.CreateImage(ctx, req)
}

// NewClient returns a real OpenAI client as LLMClient
func NewClient(APIKey, URL, timeout string) LLMClient {
// Set up OpenAI client
if APIKey == "" {
//log.Fatal("OPENAI_API_KEY environment variable not set")
@@ -18,11 +38,12 @@ func NewClient(APIKey, URL, timeout string) *openai.Client {

dur, err := time.ParseDuration(timeout)
if err != nil {
xlog.Error("Failed to parse timeout", "error", err)
dur = 150 * time.Second
}

config.HTTPClient = &http.Client{
Timeout: dur,
}
return openai.NewClientWithConfig(config)
return &realClient{openai.NewClientWithConfig(config)}
}

@@ -10,7 +10,7 @@ import (
"github.com/sashabaranov/go-openai/jsonschema"
)

func GenerateTypedJSONWithGuidance(ctx context.Context, client *openai.Client, guidance, model string, i jsonschema.Definition, dst any) error {
func GenerateTypedJSONWithGuidance(ctx context.Context, client LLMClient, guidance, model string, i jsonschema.Definition, dst any) error {
return GenerateTypedJSONWithConversation(ctx, client, []openai.ChatCompletionMessage{
{
Role: "user",
@@ -19,7 +19,7 @@ func GenerateTypedJSONWithGuidance(ctx context.Context, client *openai.Client, g
}, model, i, dst)
}

func GenerateTypedJSONWithConversation(ctx context.Context, client *openai.Client, conv []openai.ChatCompletionMessage, model string, i jsonschema.Definition, dst any) error {
func GenerateTypedJSONWithConversation(ctx context.Context, client LLMClient, conv []openai.ChatCompletionMessage, model string, i jsonschema.Definition, dst any) error {
toolName := "json"
decision := openai.ChatCompletionRequest{
Model: model,

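The point of the new interface is that call sites no longer depend on *openai.Client, so the real client and the mock become interchangeable. An illustrative sketch (not part of this diff; the askOnce helper and the values passed to it are invented):

    package llm_test

    import (
        "context"
        "fmt"

        "github.com/mudler/LocalAGI/pkg/llm"
        "github.com/sashabaranov/go-openai"
    )

    // askOnce works with any LLMClient: production code can pass the value returned by
    // llm.NewClient(apiKey, apiURL, "1m"), while tests can pass an &llm.MockClient{...}.
    func askOnce(ctx context.Context, c llm.LLMClient, model, prompt string) (string, error) {
        resp, err := c.CreateChatCompletion(ctx, openai.ChatCompletionRequest{
            Model:    model,
            Messages: []openai.ChatCompletionMessage{{Role: "user", Content: prompt}},
        })
        if err != nil {
            return "", err
        }
        if len(resp.Choices) == 0 {
            return "", fmt.Errorf("no choices returned")
        }
        return resp.Choices[0].Message.Content, nil
    }
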
25
pkg/llm/mock_client.go
Normal file
@@ -0,0 +1,25 @@
package llm

import (
"context"
"github.com/sashabaranov/go-openai"
)

type MockClient struct {
CreateChatCompletionFunc func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
CreateImageFunc func(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error)
}

func (m *MockClient) CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
if m.CreateChatCompletionFunc != nil {
return m.CreateChatCompletionFunc(ctx, req)
}
return openai.ChatCompletionResponse{}, nil
}

func (m *MockClient) CreateImage(ctx context.Context, req openai.ImageRequest) (openai.ImageResponse, error) {
if m.CreateImageFunc != nil {
return m.CreateImageFunc(ctx, req)
}
return openai.ImageResponse{}, nil
}
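Beyond returning canned data, the hook functions on MockClient also make it easy to capture the request a caller built and assert on it. A minimal plain-Go sketch (illustrative, not part of this diff; it assumes a test file in an llm_test package):

    func TestMockClientCapturesRequest(t *testing.T) {
        var got openai.ChatCompletionRequest
        mock := &llm.MockClient{
            CreateChatCompletionFunc: func(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error) {
                got = req // capture what the caller sent
                return openai.ChatCompletionResponse{
                    Choices: []openai.ChatCompletionChoice{{
                        Message: openai.ChatCompletionMessage{Content: "canned reply"},
                    }},
                }, nil
            },
        }

        resp, err := mock.CreateChatCompletion(context.Background(), openai.ChatCompletionRequest{
            Model:    "test-model",
            Messages: []openai.ChatCompletionMessage{{Role: "user", Content: "hello"}},
        })
        if err != nil {
            t.Fatal(err)
        }
        if resp.Choices[0].Message.Content != "canned reply" {
            t.Fatalf("unexpected reply: %q", resp.Choices[0].Message.Content)
        }
        if got.Model != "test-model" {
            t.Fatalf("unexpected model: %q", got.Model)
        }
    }
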
@@ -47,9 +47,6 @@ const (
ActionCallAgents = "call_agents"
ActionShellcommand = "shell-command"
ActionSendTelegramMessage = "send-telegram-message"
ActionSetReminder = "set_reminder"
ActionListReminders = "list_reminders"
ActionRemoveReminder = "remove_reminder"
)

var AvailableActions = []string{
@@ -84,17 +81,8 @@ var AvailableActions = []string{
ActionCallAgents,
ActionShellcommand,
ActionSendTelegramMessage,
ActionSetReminder,
ActionListReminders,
ActionRemoveReminder,
}

const (
ActionConfigBrowserAgentRunner = "browser-agent-runner-base-url"
ActionConfigDeepResearchRunner = "deep-research-runner-base-url"
ActionConfigSSHBoxURL = "sshbox-url"
)

func Actions(actionsConfigs map[string]string) func(a *state.AgentConfig) func(ctx context.Context, pool *state.AgentPool) []types.Action {
return func(a *state.AgentConfig) func(ctx context.Context, pool *state.AgentPool) []types.Action {
return func(ctx context.Context, pool *state.AgentPool) []types.Action {
@@ -148,9 +136,9 @@ func Action(name, agentName string, config map[string]string, pool *state.AgentP
case ActionGithubIssueSearcher:
a = actions.NewGithubIssueSearch(config)
case ActionBrowserAgentRunner:
a = actions.NewBrowserAgentRunner(config, actionsConfigs[ActionConfigBrowserAgentRunner])
a = actions.NewBrowserAgentRunner(config, actionsConfigs["browser-agent-runner-base-url"])
case ActionDeepResearchRunner:
a = actions.NewDeepResearchRunner(config, actionsConfigs[ActionConfigDeepResearchRunner])
a = actions.NewDeepResearchRunner(config, actionsConfigs["deep-research-runner-base-url"])
case ActionGithubIssueReader:
a = actions.NewGithubIssueReader(config)
case ActionGithubPRReader:
@@ -190,15 +178,9 @@ func Action(name, agentName string, config map[string]string, pool *state.AgentP
case ActionCallAgents:
a = actions.NewCallAgent(config, agentName, pool.InternalAPI())
case ActionShellcommand:
a = actions.NewShell(config, actionsConfigs[ActionConfigSSHBoxURL])
a = actions.NewShell(config)
case ActionSendTelegramMessage:
a = actions.NewSendTelegramMessageRunner(config)
case ActionSetReminder:
a = action.NewReminder()
case ActionListReminders:
a = action.NewListReminders()
case ActionRemoveReminder:
a = action.NewRemoveReminder()
default:
xlog.Error("Action not found", "name", name)
return nil, fmt.Errorf("Action not found")
@@ -368,20 +350,5 @@ func ActionsConfigMeta() []config.FieldGroup {
Label: "Send Telegram Message",
Fields: actions.SendTelegramMessageConfigMeta(),
},
{
Name: "set_reminder",
Label: "Set Reminder",
Fields: []config.Field{},
},
{
Name: "list_reminders",
Label: "List Reminders",
Fields: []config.Field{},
},
{
Name: "remove_reminder",
Label: "Remove Reminder",
Fields: []config.Field{},
},
}
}

@@ -4,7 +4,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"log"
|
||||
"strings"
|
||||
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
"github.com/mudler/LocalAGI/pkg/config"
|
||||
@@ -12,24 +11,21 @@ import (
|
||||
"golang.org/x/crypto/ssh"
|
||||
)
|
||||
|
||||
func NewShell(config map[string]string, sshBoxURL string) *ShellAction {
|
||||
func NewShell(config map[string]string) *ShellAction {
|
||||
return &ShellAction{
|
||||
privateKey: config["privateKey"],
|
||||
user: config["user"],
|
||||
host: config["host"],
|
||||
password: config["password"],
|
||||
customName: config["customName"],
|
||||
customDescription: config["customDescription"],
|
||||
sshBoxURL: sshBoxURL,
|
||||
}
|
||||
}
|
||||
|
||||
type ShellAction struct {
|
||||
privateKey string
|
||||
user, host, password string
|
||||
customName string
|
||||
customDescription string
|
||||
sshBoxURL string
|
||||
privateKey string
|
||||
user, host string
|
||||
customName string
|
||||
customDescription string
|
||||
}
|
||||
|
||||
func (a *ShellAction) Run(ctx context.Context, sharedState *types.AgentSharedState, params types.ActionParams) (types.ActionResult, error) {
|
||||
@@ -50,23 +46,7 @@ func (a *ShellAction) Run(ctx context.Context, sharedState *types.AgentSharedSta
|
||||
result.User = a.user
|
||||
}
|
||||
|
||||
password := a.password
|
||||
if a.sshBoxURL != "" && result.Host == "" && result.User == "" && password == "" {
|
||||
// sshbox url can be root:root@localhost:2222
|
||||
parts := strings.Split(a.sshBoxURL, "@")
|
||||
if len(parts) == 2 {
|
||||
if strings.Contains(parts[0], ":") {
|
||||
userPass := strings.Split(parts[0], ":")
|
||||
result.User = userPass[0]
|
||||
password = userPass[1]
|
||||
} else {
|
||||
result.User = parts[0]
|
||||
}
|
||||
result.Host = parts[1]
|
||||
}
|
||||
}
|
||||
|
||||
output, err := sshCommand(a.privateKey, result.Command, result.User, result.Host, password)
|
||||
output, err := sshCommand(a.privateKey, result.Command, result.User, result.Host)
|
||||
if err != nil {
|
||||
return types.ActionResult{}, err
|
||||
}
|
||||
@@ -75,15 +55,15 @@ func (a *ShellAction) Run(ctx context.Context, sharedState *types.AgentSharedSta
|
||||
}
|
||||
|
||||
func (a *ShellAction) Definition() types.ActionDefinition {
|
||||
name := "run_command"
|
||||
description := "Run a command on a linux environment."
|
||||
name := "shell"
|
||||
description := "Run a shell command on a remote server."
|
||||
if a.customName != "" {
|
||||
name = a.customName
|
||||
}
|
||||
if a.customDescription != "" {
|
||||
description = a.customDescription
|
||||
}
|
||||
if (a.host != "" && a.user != "") || a.sshBoxURL != "" {
|
||||
if a.host != "" && a.user != "" {
|
||||
return types.ActionDefinition{
|
||||
Name: types.ActionDefinitionName(name),
|
||||
Description: description,
|
||||
@@ -124,7 +104,7 @@ func ShellConfigMeta() []config.Field {
|
||||
Name: "privateKey",
|
||||
Label: "Private Key",
|
||||
Type: config.FieldTypeTextarea,
|
||||
Required: false,
|
||||
Required: true,
|
||||
HelpText: "SSH private key for connecting to remote servers",
|
||||
},
|
||||
{
|
||||
@@ -133,12 +113,6 @@ func ShellConfigMeta() []config.Field {
|
||||
Type: config.FieldTypeText,
|
||||
HelpText: "Default SSH user for connecting to remote servers",
|
||||
},
|
||||
{
|
||||
Name: "password",
|
||||
Label: "Default Password",
|
||||
Type: config.FieldTypeText,
|
||||
HelpText: "Default SSH password for connecting to remote servers",
|
||||
},
|
||||
{
|
||||
Name: "host",
|
||||
Label: "Default Host",
|
||||
@@ -160,25 +134,19 @@ func ShellConfigMeta() []config.Field {
|
||||
}
|
||||
}
|
||||
|
||||
func sshCommand(privateKey, command, user, host, password string) (string, error) {
|
||||
|
||||
authMethods := []ssh.AuthMethod{}
|
||||
if password != "" {
|
||||
authMethods = append(authMethods, ssh.Password(password))
|
||||
}
|
||||
if privateKey != "" {
|
||||
// Create signer from private key string
|
||||
key, err := ssh.ParsePrivateKey([]byte(privateKey))
|
||||
if err != nil {
|
||||
log.Fatalf("failed to parse private key: %v", err)
|
||||
}
|
||||
authMethods = append(authMethods, ssh.PublicKeys(key))
|
||||
func sshCommand(privateKey, command, user, host string) (string, error) {
|
||||
// Create signer from private key string
|
||||
key, err := ssh.ParsePrivateKey([]byte(privateKey))
|
||||
if err != nil {
|
||||
log.Fatalf("failed to parse private key: %v", err)
|
||||
}
|
||||
|
||||
// SSH client configuration
|
||||
config := &ssh.ClientConfig{
|
||||
User: user,
|
||||
Auth: authMethods,
|
||||
User: user,
|
||||
Auth: []ssh.AuthMethod{
|
||||
ssh.PublicKeys(key),
|
||||
},
|
||||
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
|
||||
}
|
||||
|
||||
@@ -197,15 +165,12 @@ func sshCommand(privateKey, command, user, host, password string) (string, error
|
||||
defer session.Close()
|
||||
|
||||
// Run a command
|
||||
cmdOut, err := session.CombinedOutput(command)
|
||||
result := string(cmdOut)
|
||||
if strings.TrimSpace(result) == "" {
|
||||
result += "\nCommand has exited with no output"
|
||||
}
|
||||
output, err := session.CombinedOutput(command)
|
||||
if err != nil {
|
||||
result += "\nError: " + err.Error()
|
||||
return "", fmt.Errorf("failed to run: %v", err)
|
||||
}
|
||||
return result, nil
|
||||
|
||||
return string(output), nil
|
||||
}
|
||||
|
||||
func (a *ShellAction) Plannable() bool {
|
||||
|
||||
@@ -20,7 +20,6 @@ const (
ConnectorGithubPRs = "github-prs"
ConnectorTwitter = "twitter"
ConnectorMatrix = "matrix"
ConnectorEmail = "email"
)

var AvailableConnectors = []string{
@@ -32,7 +31,6 @@ var AvailableConnectors = []string{
ConnectorGithubPRs,
ConnectorTwitter,
ConnectorMatrix,
ConnectorEmail,
}

func Connectors(a *state.AgentConfig) []state.Connector {
@@ -72,8 +70,6 @@ func Connectors(a *state.AgentConfig) []state.Connector {
conns = append(conns, cc)
case ConnectorMatrix:
conns = append(conns, connectors.NewMatrix(config))
case ConnectorEmail:
conns = append(conns, connectors.NewEmail(config))
}
}
return conns
@@ -121,10 +117,5 @@ func ConnectorsConfigMeta() []config.FieldGroup {
Label: "Matrix",
Fields: connectors.MatrixConfigMeta(),
},
{
Name: "email",
Label: "Email",
Fields: connectors.EmailConfigMeta(),
},
}
}

@@ -83,27 +83,6 @@ func (d *Discord) Start(a *agent.Agent) {

dg.StateEnabled = true

if d.defaultChannel != "" {
// handle new conversations
a.AddSubscriber(func(ccm openai.ChatCompletionMessage) {
xlog.Debug("Subscriber(discord)", "message", ccm.Content)

// Send the message to the default channel
_, err := dg.ChannelMessageSend(d.defaultChannel, ccm.Content)
if err != nil {
xlog.Error(fmt.Sprintf("Error sending message: %v", err))
}

a.SharedState().ConversationTracker.AddMessage(
fmt.Sprintf("discord:%s", d.defaultChannel),
openai.ChatCompletionMessage{
Content: ccm.Content,
Role: "assistant",
},
)
})
}

// Register the messageCreate func as a callback for MessageCreate events.
dg.AddHandler(d.messageCreate(a))

@@ -1,457 +0,0 @@
|
||||
package connectors
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"mime"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
htmltomarkdown "github.com/JohannesKaufmann/html-to-markdown/v2"
|
||||
imap "github.com/emersion/go-imap/v2"
|
||||
sasl "github.com/emersion/go-sasl"
|
||||
smtp "github.com/emersion/go-smtp"
|
||||
|
||||
"github.com/emersion/go-imap/v2/imapclient"
|
||||
"github.com/emersion/go-message"
|
||||
"github.com/emersion/go-message/charset"
|
||||
"github.com/gomarkdown/markdown"
|
||||
"github.com/gomarkdown/markdown/html"
|
||||
"github.com/gomarkdown/markdown/parser"
|
||||
|
||||
"github.com/mudler/LocalAGI/core/agent"
|
||||
"github.com/mudler/LocalAGI/core/types"
|
||||
"github.com/mudler/LocalAGI/pkg/config"
|
||||
"github.com/mudler/LocalAGI/pkg/xlog"
|
||||
"github.com/sashabaranov/go-openai"
|
||||
)
|
||||
|
||||
type Email struct {
|
||||
username string
|
||||
name string
|
||||
password string
|
||||
email string
|
||||
smtpServer string
|
||||
smtpInsecure bool
|
||||
imapServer string
|
||||
imapInsecure bool
|
||||
defaultEmail string
|
||||
}
|
||||
|
||||
func NewEmail(config map[string]string) *Email {
|
||||
|
||||
return &Email{
|
||||
username: config["username"],
|
||||
name: config["name"],
|
||||
password: config["password"],
|
||||
email: config["email"],
|
||||
smtpServer: config["smtpServer"],
|
||||
smtpInsecure: config["smtpInsecure"] == "true",
|
||||
imapServer: config["imapServer"],
|
||||
imapInsecure: config["imapInsecure"] == "true",
|
||||
defaultEmail: config["defaultEmail"],
|
||||
}
|
||||
}
|
||||
|
||||
func EmailConfigMeta() []config.Field {
|
||||
return []config.Field{
|
||||
{
|
||||
Name: "smtpServer",
|
||||
Label: "SMTP Host:port",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "SMTP server host:port (e.g., smtp.gmail.com:587)",
|
||||
},
|
||||
{
|
||||
Name: "smtpInsecure",
|
||||
Label: "Insecure SMTP",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
},
|
||||
{
|
||||
Name: "imapServer",
|
||||
Label: "IMAP Host:port",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "IMAP server host:port (e.g., imap.gmail.com:993)",
|
||||
},
|
||||
{
|
||||
Name: "imapInsecure",
|
||||
Label: "Insecure IMAP",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
},
|
||||
{
|
||||
Name: "username",
|
||||
Label: "Username",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "Username/email address",
|
||||
},
|
||||
{
|
||||
Name: "name",
|
||||
Label: "Friendly Name",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "Friendly name of sender",
|
||||
},
|
||||
{
|
||||
Name: "password",
|
||||
Label: "Password",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "SMTP/IMAP password or app password",
|
||||
},
|
||||
{
|
||||
Name: "email",
|
||||
Label: "From Email",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
HelpText: "Agent email address",
|
||||
},
|
||||
{
|
||||
Name: "defaultEmail",
|
||||
Label: "Default Recipient",
|
||||
Type: config.FieldTypeText,
|
||||
HelpText: "Default email address to send messages to when the agent wants to initiate a conversation",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Email) AgentResultCallback() func(state types.ActionState) {
|
||||
return func(state types.ActionState) {
|
||||
// Send the result to the bot
|
||||
}
|
||||
}
|
||||
|
||||
func (e *Email) AgentReasoningCallback() func(state types.ActionCurrentState) bool {
|
||||
return func(state types.ActionCurrentState) bool {
|
||||
// Send the reasoning to the bot
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
func filterEmailRecipients(input string, emailToRemove string) string {
|
||||
|
||||
addresses := strings.Split(strings.TrimPrefix(input, "To: "), ",")
|
||||
|
||||
var filtered []string
|
||||
for _, address := range addresses {
|
||||
address = strings.TrimSpace(address)
|
||||
if !strings.Contains(address, emailToRemove) {
|
||||
filtered = append(filtered, address)
|
||||
}
|
||||
}
|
||||
|
||||
if len(filtered) > 0 {
|
||||
return strings.Join(filtered, ", ")
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (e *Email) sendMail(to, subject, content, replyToID, references string, emails []string, html bool) {
	auth := sasl.NewPlainClient("", e.username, e.password)

	contentType := "text/plain"
	if html {
		contentType = "text/html"
	}

	var replyHeaders string
	if replyToID != "" {
		referenceLine := strings.ReplaceAll(references+" "+replyToID, "\n", "")
		replyHeaders = fmt.Sprintf("In-Reply-To: %s\r\nReferences: %s\r\n", replyToID, referenceLine)
	}

	// Build full message content
	var builder strings.Builder
	fmt.Fprintf(&builder, "To: %s\r\n", to)
	fmt.Fprintf(&builder, "From: %s <%s>\r\n", e.name, e.email)
	builder.WriteString(replyHeaders)
	fmt.Fprintf(&builder, "MIME-Version: 1.0\r\nContent-Type: %s;\r\n", contentType)
	fmt.Fprintf(&builder, "Subject: %s\r\n\r\n", subject)
	fmt.Fprintf(&builder, "%s\r\n", content)
	msg := strings.NewReader(builder.String())

	if !e.smtpInsecure {
		err := smtp.SendMail(e.smtpServer, auth, e.email, emails, msg)
		if err != nil {
			xlog.Error(fmt.Sprintf("Email send err: %v", err))
		}
	} else {
		c, err := smtp.Dial(e.smtpServer)
		if err != nil {
			xlog.Error(fmt.Sprintf("Email connection err: %v", err))
		}
		defer c.Close()

		err = c.Hello("client")
		if err != nil {
			xlog.Error(fmt.Sprintf("Email hello err: %v", err))
		}

		err = c.Auth(auth)
		if err != nil {
			xlog.Error(fmt.Sprintf("Email auth err: %v", err))
		}

		err = c.SendMail(e.email, emails, msg)
		if err != nil {
			xlog.Error(fmt.Sprintf("Email send err: %v", err))
		}
	}
}
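
For orientation, the builder above yields a raw message roughly like this before it is handed to the SMTP client (addresses and message IDs are illustrative):

	To: alice@example.com
	From: LocalAGI Agent <agent@example.com>
	In-Reply-To: <original-msg-id@example.com>
	References: <thread-root@example.com> <original-msg-id@example.com>
	MIME-Version: 1.0
	Content-Type: text/plain;
	Subject: Re: Hello

	...message body...
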
func imapWorker(done chan bool, e *Email, a *agent.Agent, c *imapclient.Client, startIndex uint32) {
	currentIndex := startIndex

	for {
		select {
		case <-done:
			xlog.Info("Stopping imapWorker")
			err := c.Logout().Wait()
			if err != nil {
				xlog.Error(fmt.Sprintf("Email IMAP logout fail: %v", err))
			}
			return

		default:
			selectedMbox, err := c.Select("INBOX", nil).Wait()
			if err != nil {
				xlog.Error(fmt.Sprintf("Email IMAP mailbox err: %v", err))
			}

			// Loop over any new messages received in the selected mailbox
			for currentIndex < selectedMbox.NumMessages {
				currentIndex++

				// Download email info
				seqSet := imap.SeqSetNum(currentIndex)
				bodySection := &imap.FetchItemBodySection{}
				fetchOptions := &imap.FetchOptions{
					Flags:       true,
					Envelope:    true,
					BodySection: []*imap.FetchItemBodySection{bodySection},
				}
				messageBuffers, err := c.Fetch(seqSet, fetchOptions).Collect()
				if err != nil {
					xlog.Error(fmt.Sprintf("Email IMAP fetch err: %v", err))
				}

				// Start conversation goroutine
				go func(e *Email, a *agent.Agent, c *imapclient.Client, fmb *imapclient.FetchMessageBuffer) {
					// Download Email contents
					r := bytes.NewReader(fmb.FindBodySection(bodySection))
					msg, err := message.Read(r)
					if err != nil {
						xlog.Error(fmt.Sprintf("Email reader err: %v", err))
					}
					buf := new(bytes.Buffer)
					buf.ReadFrom(msg.Body)

					xlog.Debug("New email!")
					xlog.Debug(fmt.Sprintf("From: %s", msg.Header.Get("From")))
					xlog.Debug(fmt.Sprintf("To: %s", msg.Header.Get("To")))
					xlog.Debug(fmt.Sprintf("Subject: %s", msg.Header.Get("Subject")))

					// In the event that an email account has multiple email addresses, only respond to the one configured
					if !strings.Contains(msg.Header.Get("To"), e.email) {
						xlog.Info(fmt.Sprintf("Email was sent to %s, but appeared in my inbox (%s). Ignoring!", msg.Header.Get("To"), e.email))
						return
					}

					content := buf.String()
					contentIsHTML := false

					// Convert email to markdown only if it's in HTML
					prefixes := []string{"<html", "<body", "<div", "<head"}
					for _, prefix := range prefixes {
						if strings.HasPrefix(strings.ToLower(content), prefix) {
							content, err = htmltomarkdown.ConvertString(buf.String())
							contentIsHTML = true
							if err != nil {
								xlog.Error(fmt.Sprintf("Email html => md err: %v", err))
								contentIsHTML = false
								content = buf.String()
							}
						}
					}

					xlog.Debug(fmt.Sprintf("Markdown:\n\n%s", content))

					// Construct prompt
					prompt := fmt.Sprintf("%s %s:\n\nFrom: %s\nTime: %s\nSubject: %s\n=====\n%s",
						"This email thread was sent to you. You are",
						e.email,
						msg.Header.Get("From"),
						fmb.Envelope.Date.Format(time.RFC3339),
						fmb.Envelope.Subject,
						content,
					)
					conv := []openai.ChatCompletionMessage{}
					conv = append(conv, openai.ChatCompletionMessage{Role: "user", Content: prompt})

					// Send prompt to agent and wait for result
					xlog.Debug(fmt.Sprintf("Starting conversation:\n\n%v", conv))
					jobResult := a.Ask(types.WithConversationHistory(conv))
					if jobResult.Error != nil {
						xlog.Error(fmt.Sprintf("Error asking agent: %v", jobResult.Error))
					}

					// Send agent response to user, replying to original email.
					xlog.Debug("Agent finished responding. Sending reply email to user")

					// Get a list of emails to respond to ("Reply All" logic)
					// This could be done through regex, but it's probably safer to rebuild explicitly
					fromEmail := fmt.Sprintf("%s@%s", fmb.Envelope.From[0].Mailbox, fmb.Envelope.From[0].Host)
					emails := []string{}
					emails = append(emails, fromEmail)

					for _, addr := range fmb.Envelope.To {
						if addr.Mailbox != "" && addr.Host != "" {
							email := fmt.Sprintf("%s@%s", addr.Mailbox, addr.Host)
							if email != e.email {
								emails = append(emails, email)
							}
						}
					}

					// Keep the original header, in case sender had contact names as part of the header
					newToHeader := msg.Header.Get("From") + ", " + filterEmailRecipients(msg.Header.Get("To"), e.email)

					// Create the body of the email
					replyContent := jobResult.Response
					if jobResult.Response == "" {
						replyContent =
							"System: I'm sorry, but it looks like the agent did not respond. " +
								"This could be in error, or maybe it had nothing to say."
					}

					// Quote the original message. This lets the agent see conversation history and is an email standard.
					quoteHeader := fmt.Sprintf("\r\n\r\nOn %s, %s wrote:\n",
						fmb.Envelope.Date.Format("Monday, Jan 2, 2006 at 15:04"),
						fmt.Sprintf("%s <%s>", fmb.Envelope.From[0].Name, fromEmail),
					)
					quotedLines := strings.Split(strings.ReplaceAll(content, "\r\n", "\n"), "\n")
					for i, line := range quotedLines {
						quotedLines[i] = "> " + line
					}
					replyContent = replyContent + quoteHeader + strings.Join(quotedLines, "\r\n")

					// If the original email was sent in HTML, reply with HTML
					if contentIsHTML {
						p := parser.NewWithExtensions(parser.CommonExtensions | parser.AutoHeadingIDs | parser.NoEmptyLineBeforeBlock)
						doc := p.Parse([]byte(replyContent))

						opts := html.RendererOptions{Flags: html.CommonFlags | html.HrefTargetBlank | html.CompletePage}
						renderer := html.NewRenderer(opts)

						replyContent = string(markdown.Render(doc, renderer))
					}

					// Send the email
					e.sendMail(newToHeader,
						fmt.Sprintf("Re: %s", msg.Header.Get("Subject")),
						replyContent,
						msg.Header.Get("Message-ID"),
						msg.Header.Get("References"),
						emails,
						contentIsHTML,
					)
				}(e, a, c, messageBuffers[0])
			}
			time.Sleep(5 * time.Second) // Refresh inbox every n seconds
		}
	}
}
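
The worker polls the selected mailbox every five seconds and is stopped through its done channel; the Start method below wires this up, roughly as in this sketch:

	done := make(chan bool)
	go imapWorker(done, e, a, c, selectedMbox.NumMessages)
	// ... when the agent context is cancelled:
	done <- true
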
func (e *Email) Start(a *agent.Agent) {
	go func() {
		if e.defaultEmail != "" {
			// handle new conversations
			a.AddSubscriber(func(ccm openai.ChatCompletionMessage) {
				xlog.Debug("Subscriber(email)", "message", ccm.Content)

				// Send the message to the default email
				e.sendMail(
					e.defaultEmail,
					"Message from LocalAGI",
					ccm.Content,
					"",
					"",
					[]string{e.defaultEmail},
					false,
				)

				a.SharedState().ConversationTracker.AddMessage(
					fmt.Sprintf("email:%s", e.defaultEmail),
					openai.ChatCompletionMessage{
						Content: ccm.Content,
						Role:    "assistant",
					},
				)
			})
		}

		xlog.Info("Email connector is now running. Press CTRL-C to exit.")
		// IMAP dial
		imapOpts := &imapclient.Options{WordDecoder: &mime.WordDecoder{CharsetReader: charset.Reader}}
		var c *imapclient.Client
		var err error
		if e.imapInsecure {
			c, err = imapclient.DialInsecure(e.imapServer, imapOpts)
		} else {
			c, err = imapclient.DialTLS(e.imapServer, imapOpts)
		}

		if err != nil {
			xlog.Error(fmt.Sprintf("Email IMAP dial err: %v", err))
			return
		}
		defer c.Close()

		// IMAP login
		err = c.Login(e.username, e.password).Wait()
		if err != nil {
			xlog.Error(fmt.Sprintf("Email IMAP login err: %v", err))
			return
		}

		// IMAP mailbox
		mailboxes, err := c.List("", "%", nil).Collect()
		if err != nil {
			xlog.Error(fmt.Sprintf("Email IMAP mailbox err: %v", err))
			return
		}

		xlog.Debug(fmt.Sprintf("Email IMAP mailbox count: %v", len(mailboxes)))
		for _, mbox := range mailboxes {
			xlog.Debug(fmt.Sprintf(" - %v", mbox.Mailbox))
		}

		// Select INBOX
		selectedMbox, err := c.Select("INBOX", nil).Wait()
		if err != nil {
			xlog.Error(fmt.Sprintf("Cannot select INBOX mailbox! %v", err))
			return
		}
		xlog.Debug(fmt.Sprintf("INBOX contains %v messages", selectedMbox.NumMessages))

		// Start checking INBOX for new mail
		imapWorkerHandle := make(chan bool)
		go imapWorker(imapWorkerHandle, e, a, c, selectedMbox.NumMessages)

		<-a.Context().Done()
		imapWorkerHandle <- true
		xlog.Info("Email connector is now stopped.")
	}()
}

@@ -70,52 +70,6 @@ func (i *IRC) Start(a *agent.Agent) {
		return
	}
	i.conn.UseTLS = false

	if i.channel != "" {
		// handle new conversations
		a.AddSubscriber(func(ccm openai.ChatCompletionMessage) {
			xlog.Debug("Subscriber(irc)", "message", ccm.Content)

			// Split the response into multiple messages if it's too long
			maxLength := 400 // Safe limit for most IRC servers
			response := ccm.Content

			// Handle multiline responses
			lines := strings.Split(response, "\n")
			for _, line := range lines {
				if line == "" {
					continue
				}

				// Split long lines
				for len(line) > 0 {
					var chunk string
					if len(line) > maxLength {
						chunk = line[:maxLength]
						line = line[maxLength:]
					} else {
						chunk = line
						line = ""
					}

					// Send the message to the channel
					i.conn.Privmsg(i.channel, chunk)

					// Small delay to prevent flooding
					time.Sleep(500 * time.Millisecond)
				}
			}

			a.SharedState().ConversationTracker.AddMessage(
				fmt.Sprintf("irc:%s", i.channel),
				openai.ChatCompletionMessage{
					Content: ccm.Content,
					Role:    "assistant",
				},
			)
		})
	}

	i.conn.AddCallback("001", func(e *irc.Event) {
		xlog.Info("Connected to IRC server", "server", i.server, "arguments", e.Arguments)
		i.conn.Join(i.channel)

@@ -3,7 +3,6 @@ package connectors

import (
	"context"
	"fmt"
	"slices"
	"sync"
	"time"

@@ -115,7 +114,7 @@ func (m *Matrix) cancelActiveJobForRoom(roomID string) {
func (m *Matrix) handleRoomMessage(a *agent.Agent, evt *event.Event) {
	if m.roomID != evt.RoomID.String() && m.roomMode { // If we have a roomID and it's not the same as the event room
		// Skip messages from other rooms
		xlog.Info("Skipping reply to room", "event room", evt.RoomID, "config room", m.roomID)
		xlog.Info("Skipping reply to room", evt.RoomID, m.roomID)
		return
	}

@@ -126,13 +125,17 @@ func (m *Matrix) handleRoomMessage(a *agent.Agent, evt *event.Event) {

	// Skip if message does not mention the bot
	mentioned := false
	msg := evt.Content.AsMessage()
	if msg.Mentions != nil {
		mentioned = slices.Contains(evt.Content.AsMessage().Mentions.UserIDs, m.client.UserID)
	if evt.Content.AsMessage().Mentions != nil {
		for _, mention := range evt.Content.AsMessage().Mentions.UserIDs {
			if mention == m.client.UserID {
				mentioned = true
				break
			}
		}
	}

	if !mentioned && !m.roomMode {
		xlog.Info("Skipping reply because it does not mention the bot", "mentions", evt.Content.AsMessage().Mentions.UserIDs)
		xlog.Info("Skipping reply because it does not mention the bot", evt.RoomID, m.roomID)
		return
	}

@@ -160,7 +163,7 @@ func (m *Matrix) handleRoomMessage(a *agent.Agent, evt *event.Event) {
	agentOptions = append(agentOptions, types.WithConversationHistory(currentConv))

	// Add room to metadata for tracking
	metadata := map[string]any{
	metadata := map[string]interface{}{
		"room": evt.RoomID.String(),
	}
	agentOptions = append(agentOptions, types.WithMetadata(metadata))
@@ -178,7 +181,7 @@ func (m *Matrix) handleRoomMessage(a *agent.Agent, evt *event.Event) {
		job.Cancel()
		for i, j := range m.activeJobs[evt.RoomID.String()] {
			if j.UUID == job.UUID {
				m.activeJobs[evt.RoomID.String()] = slices.Delete(m.activeJobs[evt.RoomID.String()], i, i+1)
				m.activeJobs[evt.RoomID.String()] = append(m.activeJobs[evt.RoomID.String()][:i], m.activeJobs[evt.RoomID.String()][i+1:]...)
				break
			}
		}
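
The two removal lines above are the two sides of the same change: for a slice of jobs, slices.Delete(s, i, i+1) and append(s[:i], s[i+1:]...) both drop the element at index i. A minimal illustration with hypothetical values:

	jobs := []*types.Job{j0, j1, j2}
	jobs = slices.Delete(jobs, 1, 2) // same resulting slice as append(jobs[:1], jobs[2:]...): [j0, j2]
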
@@ -215,6 +218,7 @@ func (m *Matrix) handleRoomMessage(a *agent.Agent, evt *event.Event) {
}

func (m *Matrix) Start(a *agent.Agent) {
	// Create Matrix client
	client, err := mautrix.NewClient(m.homeserverURL, id.UserID(m.userID), m.accessToken)
	if err != nil {
		xlog.Error(fmt.Sprintf("Error creating Matrix client: %v", err))
@@ -223,24 +227,7 @@ func (m *Matrix) Start(a *agent.Agent) {
	xlog.Info("Matrix client created")
	m.client = client

	if m.roomID != "" {
		// handle new conversations
		a.AddSubscriber(func(ccm openai.ChatCompletionMessage) {
			xlog.Debug("Subscriber(matrix)", "message", ccm.Content)
			_, err := m.client.SendText(context.Background(), id.RoomID(m.roomID), ccm.Content)
			if err != nil {
				xlog.Error(fmt.Sprintf("Error posting message: %v", err))
			}
			a.SharedState().ConversationTracker.AddMessage(
				fmt.Sprintf("matrix:%s", m.roomID),
				openai.ChatCompletionMessage{
					Content: ccm.Content,
					Role:    "assistant",
				},
			)
		})
	}
||||
|
||||
// Set up event handler
|
||||
syncer := client.Syncer.(*mautrix.DefaultSyncer)
|
||||
syncer.OnEventType(event.EventMessage, func(ctx context.Context, evt *event.Event) {
|
||||
xlog.Info("Received message", evt.Content.AsMessage().Body)
|
||||
@@ -262,32 +249,24 @@ func (m *Matrix) Start(a *agent.Agent) {
|
||||
//m.handleRoomMessage(a, evt)
|
||||
})
|
||||
|
||||
// This prevents the agent from picking up a backlog of messages and swamping the chat with responses.
|
||||
syncer.FilterJSON = &mautrix.Filter{
|
||||
Room: mautrix.RoomFilter{
|
||||
Timeline: mautrix.FilterPart{
|
||||
Limit: 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Start syncing
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-a.Context().Done():
|
||||
xlog.Info("Context cancelled, stopping sync loop")
|
||||
return
|
||||
default:
|
||||
err := client.SyncWithContext(a.Context())
|
||||
err := client.SyncWithContext(a.Context())
|
||||
|
||||
xlog.Info("Syncing")
|
||||
if err != nil {
|
||||
xlog.Error(fmt.Sprintf("Error syncing: %v", err))
|
||||
time.Sleep(5 * time.Second)
|
||||
}
|
||||
xlog.Info("Syncing")
|
||||
if err != nil {
|
||||
xlog.Error(fmt.Sprintf("Error syncing: %v", err))
|
||||
time.Sleep(5 * time.Second)
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
// Handle shutdown
|
||||
go func() {
|
||||
<-a.Context().Done()
|
||||
client.StopSync()
|
||||
}()
|
||||
}
|
||||
|
||||
// MatrixConfigMeta returns the metadata for Matrix connector configuration fields
|
||||
@@ -296,35 +275,30 @@ func MatrixConfigMeta() []config.Field {
|
||||
{
|
||||
Name: "homeserverURL",
|
||||
Label: "Homeserver URL",
|
||||
HelpText: "e.g. http://host.docker.internal:8008",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "userID",
|
||||
Label: "User ID",
|
||||
HelpText: "e.g. @bot:host",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "accessToken",
|
||||
Label: "Access Token",
|
||||
HelpText: "Token obtained from _matrix/client/v3/login",
|
||||
Type: config.FieldTypeText,
|
||||
Required: true,
|
||||
},
|
||||
{
|
||||
Name: "roomID",
|
||||
Label: "Internal Room ID",
|
||||
HelpText: "The autogenerated unique identifier for a room",
|
||||
Type: config.FieldTypeText,
|
||||
Name: "roomID",
|
||||
Label: "Room ID",
|
||||
Type: config.FieldTypeText,
|
||||
},
|
||||
{
|
||||
Name: "roomMode",
|
||||
Label: "Room Mode",
|
||||
HelpText: "Respond to all messages in the specified room",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
Name: "roomMode",
|
||||
Label: "Room Mode",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,240 +44,7 @@ type Telegram struct {
|
||||
activeJobs map[int64][]*types.Job // map[chatID]bool to track if a chat has active processing
|
||||
activeJobsMutex sync.RWMutex
|
||||
|
||||
channelID string
|
||||
groupMode bool
|
||||
mentionOnly bool
|
||||
}
|
||||
|
||||
// isBotMentioned checks if the bot is mentioned in the message
|
||||
func (t *Telegram) isBotMentioned(message string, botUsername string) bool {
|
||||
return strings.Contains(message, "@"+botUsername)
|
||||
}
|
||||
|
||||
// handleGroupMessage handles messages in group chats
|
||||
func (t *Telegram) handleGroupMessage(ctx context.Context, b *bot.Bot, a *agent.Agent, update *models.Update) {
|
||||
xlog.Debug("Handling group message", "update", update)
|
||||
if !t.groupMode {
|
||||
xlog.Debug("Group mode is disabled, skipping group message", "chatID", update.Message.Chat.ID)
|
||||
return
|
||||
}
|
||||
|
||||
// Get bot info to check username
|
||||
botInfo, err := b.GetMe(ctx)
|
||||
if err != nil {
|
||||
xlog.Error("Error getting bot info", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Skip messages from ourselves
|
||||
if update.Message.From.Username == botInfo.Username {
|
||||
return
|
||||
}
|
||||
|
||||
// If mention-only mode is enabled, check if bot is mentioned
|
||||
if t.mentionOnly && !t.isBotMentioned(update.Message.Text, botInfo.Username) {
|
||||
xlog.Debug("Bot not mentioned in message, skipping", "chatID", update.Message.Chat.ID)
|
||||
return
|
||||
}
|
||||
|
||||
// Cancel any active job for this chat before starting a new one
|
||||
t.cancelActiveJobForChat(update.Message.Chat.ID)
|
||||
|
||||
currentConv := a.SharedState().ConversationTracker.GetConversation(fmt.Sprintf("telegram:%d", update.Message.Chat.ID))
|
||||
|
||||
// Clean up the message by removing bot mentions
|
||||
message := strings.ReplaceAll(update.Message.Text, "@"+botInfo.Username, "")
|
||||
message = strings.TrimSpace(message)
|
||||
|
||||
// Send initial placeholder message
|
||||
msg, err := b.SendMessage(ctx, &bot.SendMessageParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
Text: bot.EscapeMarkdown(telegramThinkingMessage),
|
||||
ParseMode: models.ParseModeMarkdown,
|
||||
ReplyParameters: &models.ReplyParameters{
|
||||
MessageID: update.Message.ID,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error sending initial message", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Store the UUID->placeholder message mapping
|
||||
jobUUID := fmt.Sprintf("%d", msg.ID)
|
||||
|
||||
t.placeholderMutex.Lock()
|
||||
t.placeholders[jobUUID] = msg.ID
|
||||
t.placeholderMutex.Unlock()
|
||||
|
||||
// Add chat ID to metadata for tracking
|
||||
metadata := map[string]interface{}{
|
||||
"chatID": update.Message.Chat.ID,
|
||||
}
|
||||
|
||||
// Handle images if present
|
||||
if len(update.Message.Photo) > 0 {
|
||||
// Get the largest photo
|
||||
photo := update.Message.Photo[len(update.Message.Photo)-1]
|
||||
|
||||
// Download the photo
|
||||
file, err := b.GetFile(ctx, &bot.GetFileParams{
|
||||
FileID: photo.FileID,
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error getting file", "error", err)
|
||||
} else {
|
||||
// Download the file content
|
||||
resp, err := http.Get(file.FilePath)
|
||||
if err != nil {
|
||||
xlog.Error("Error downloading file", "error", err)
|
||||
} else {
|
||||
defer resp.Body.Close()
|
||||
imageBytes, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
xlog.Error("Error reading image", "error", err)
|
||||
} else {
|
||||
// Encode to base64
|
||||
imgBase64 := base64.StdEncoding.EncodeToString(imageBytes)
|
||||
|
||||
// Add to conversation as multi-content message
|
||||
currentConv = append(currentConv, openai.ChatCompletionMessage{
|
||||
Role: "user",
|
||||
MultiContent: []openai.ChatMessagePart{
|
||||
{
|
||||
Text: message,
|
||||
Type: openai.ChatMessagePartTypeText,
|
||||
},
|
||||
{
|
||||
Type: openai.ChatMessagePartTypeImageURL,
|
||||
ImageURL: &openai.ChatMessageImageURL{
|
||||
URL: fmt.Sprintf("data:image/jpeg;base64,%s", imgBase64),
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
currentConv = append(currentConv, openai.ChatCompletionMessage{
|
||||
Content: message,
|
||||
Role: "user",
|
||||
})
|
||||
}
|
||||
|
||||
a.SharedState().ConversationTracker.AddMessage(
|
||||
fmt.Sprintf("telegram:%d", update.Message.Chat.ID),
|
||||
currentConv[len(currentConv)-1],
|
||||
)
|
||||
|
||||
// Create a new job with the conversation history and metadata
|
||||
job := types.NewJob(
|
||||
types.WithConversationHistory(currentConv),
|
||||
types.WithUUID(jobUUID),
|
||||
types.WithMetadata(metadata),
|
||||
)
|
||||
|
||||
// Mark this chat as having an active job
|
||||
t.activeJobsMutex.Lock()
|
||||
t.activeJobs[update.Message.Chat.ID] = append(t.activeJobs[update.Message.Chat.ID], job)
|
||||
t.activeJobsMutex.Unlock()
|
||||
|
||||
defer func() {
|
||||
// Mark job as complete
|
||||
t.activeJobsMutex.Lock()
|
||||
job.Cancel()
|
||||
for i, j := range t.activeJobs[update.Message.Chat.ID] {
|
||||
if j.UUID == job.UUID {
|
||||
t.activeJobs[update.Message.Chat.ID] = append(t.activeJobs[update.Message.Chat.ID][:i], t.activeJobs[update.Message.Chat.ID][i+1:]...)
|
||||
break
|
||||
}
|
||||
}
|
||||
t.activeJobsMutex.Unlock()
|
||||
|
||||
// Clean up the placeholder map
|
||||
t.placeholderMutex.Lock()
|
||||
delete(t.placeholders, jobUUID)
|
||||
t.placeholderMutex.Unlock()
|
||||
}()
|
||||
|
||||
res := a.Ask(
|
||||
types.WithConversationHistory(currentConv),
|
||||
types.WithUUID(jobUUID),
|
||||
types.WithMetadata(metadata),
|
||||
)
|
||||
|
||||
if res.Response == "" {
|
||||
xlog.Error("Empty response from agent")
|
||||
_, err := b.EditMessageText(ctx, &bot.EditMessageTextParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
MessageID: msg.ID,
|
||||
Text: "there was an internal error. try again!",
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error updating error message", "error", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
a.SharedState().ConversationTracker.AddMessage(
|
||||
fmt.Sprintf("telegram:%d", update.Message.Chat.ID),
|
||||
openai.ChatCompletionMessage{
|
||||
Content: res.Response,
|
||||
Role: "assistant",
|
||||
},
|
||||
)
|
||||
|
||||
// Handle any multimedia content in the response and collect URLs
|
||||
urls, err := t.handleMultimediaContent(ctx, update.Message.Chat.ID, res)
|
||||
if err != nil {
|
||||
xlog.Error("Error handling multimedia content", "error", err)
|
||||
}
|
||||
|
||||
// Update the message with the final response
|
||||
formattedResponse := formatResponseWithURLs(res.Response, urls)
|
||||
|
||||
// Split the message if it's too long
|
||||
messages := xstrings.SplitParagraph(formattedResponse, telegramMaxMessageLength)
|
||||
|
||||
if len(messages) == 0 {
|
||||
_, err := b.EditMessageText(ctx, &bot.EditMessageTextParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
MessageID: msg.ID,
|
||||
Text: "there was an internal error. try again!",
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error updating error message", "error", err)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Update the first message
|
||||
_, err = b.EditMessageText(ctx, &bot.EditMessageTextParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
MessageID: msg.ID,
|
||||
Text: messages[0],
|
||||
ParseMode: models.ParseModeMarkdown,
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error updating message", "error", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Send additional chunks as new messages
|
||||
for i := 1; i < len(messages); i++ {
|
||||
_, err = b.SendMessage(ctx, &bot.SendMessageParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
Text: messages[i],
|
||||
ParseMode: models.ParseModeMarkdown,
|
||||
ReplyParameters: &models.ReplyParameters{
|
||||
MessageID: update.Message.ID,
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error sending additional message", "error", err)
|
||||
}
|
||||
}
|
||||
channelID string
|
||||
}
|
||||
|
||||
// Send any text message to the bot after the bot has been started
|
||||
@@ -445,14 +212,10 @@ func formatResponseWithURLs(response string, urls []string) string {
|
||||
}
|
||||
|
||||
func (t *Telegram) handleUpdate(ctx context.Context, b *bot.Bot, a *agent.Agent, update *models.Update) {
|
||||
if update.Message == nil || update.Message.From == nil {
|
||||
xlog.Debug("Message or user is nil", "update", update)
|
||||
return
|
||||
}
|
||||
|
||||
username := update.Message.From.Username
|
||||
|
||||
xlog.Debug("Received message from user", "username", username, "chatID", update.Message.Chat.ID, "message", update.Message.Text)
|
||||
|
||||
internalError := func(err error, msg *models.Message) {
|
||||
xlog.Error("Error updating final message", "error", err)
|
||||
b.EditMessageText(ctx, &bot.EditMessageTextParams{
|
||||
@@ -461,17 +224,8 @@ func (t *Telegram) handleUpdate(ctx context.Context, b *bot.Bot, a *agent.Agent,
|
||||
Text: "there was an internal error. try again!",
|
||||
})
|
||||
}
|
||||
|
||||
xlog.Debug("Handling message", "update", update)
|
||||
// Handle group messages
|
||||
if update.Message.Chat.Type == "group" || update.Message.Chat.Type == "supergroup" {
|
||||
t.handleGroupMessage(ctx, b, a, update)
|
||||
return
|
||||
}
|
||||
|
||||
// Handle private messages
|
||||
if len(t.admins) > 0 && !slices.Contains(t.admins, username) {
|
||||
xlog.Info("Unauthorized user", "username", username, "admins", t.admins)
|
||||
xlog.Info("Unauthorized user", "username", username)
|
||||
_, err := b.SendMessage(ctx, &bot.SendMessageParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
Text: "you are not authorized to use this bot!",
|
||||
@@ -592,15 +346,7 @@ func (t *Telegram) handleUpdate(ctx context.Context, b *bot.Bot, a *agent.Agent,
|
||||
messages := xstrings.SplitParagraph(formattedResponse, telegramMaxMessageLength)
|
||||
|
||||
if len(messages) == 0 {
|
||||
_, err := b.EditMessageText(ctx, &bot.EditMessageTextParams{
|
||||
ChatID: update.Message.Chat.ID,
|
||||
MessageID: msg.ID,
|
||||
Text: "there was an internal error. try again!",
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error updating error message", "error", err)
|
||||
internalError(fmt.Errorf("error updating error message: %w", err), msg)
|
||||
}
|
||||
internalError(errors.New("empty response from agent"), msg)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -612,7 +358,7 @@ func (t *Telegram) handleUpdate(ctx context.Context, b *bot.Bot, a *agent.Agent,
|
||||
ParseMode: models.ParseModeMarkdown,
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error updating message", "error", err)
|
||||
internalError(fmt.Errorf("internal error: %w", err), msg)
|
||||
return
|
||||
}
|
||||
|
||||
@@ -624,7 +370,7 @@ func (t *Telegram) handleUpdate(ctx context.Context, b *bot.Bot, a *agent.Agent,
|
||||
ParseMode: models.ParseModeMarkdown,
|
||||
})
|
||||
if err != nil {
|
||||
xlog.Error("Error sending additional message", "error", err)
|
||||
internalError(fmt.Errorf("internal error: %w", err), msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -698,7 +444,7 @@ func NewTelegramConnector(config map[string]string) (*Telegram, error) {
|
||||
|
||||
admins := []string{}
|
||||
|
||||
if _, ok := config["admins"]; ok && strings.Contains(config["admins"], ",") {
|
||||
if _, ok := config["admins"]; ok {
|
||||
admins = append(admins, strings.Split(config["admins"], ",")...)
|
||||
}
|
||||
|
||||
@@ -708,8 +454,6 @@ func NewTelegramConnector(config map[string]string) (*Telegram, error) {
|
||||
placeholders: make(map[string]int),
|
||||
activeJobs: make(map[int64][]*types.Job),
|
||||
channelID: config["channel_id"],
|
||||
groupMode: config["group_mode"] == "true",
|
||||
mentionOnly: config["mention_only"] == "true",
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -734,17 +478,5 @@ func TelegramConfigMeta() []config.Field {
|
||||
Type: config.FieldTypeText,
|
||||
HelpText: "Telegram channel ID to send messages to if the agent needs to initiate a conversation",
|
||||
},
|
||||
{
|
||||
Name: "group_mode",
|
||||
Label: "Group Mode",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
HelpText: "Enable bot to respond in group chats",
|
||||
},
|
||||
{
|
||||
Name: "mention_only",
|
||||
Label: "Mention Only",
|
||||
Type: config.FieldTypeCheckbox,
|
||||
HelpText: "Bot will only respond when mentioned in group chats",
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,7 +8,6 @@ import (
	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/pkg/config"
	"github.com/mudler/LocalAGI/pkg/llm"
	"github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/jsonschema"
)

@@ -16,7 +15,7 @@ const FilterClassifier = "classifier"

type ClassifierFilter struct {
	name         string
	client       *openai.Client
	client       llm.LLMClient
	model        string
	description  string
	allowOnMatch bool
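
One side of this hunk holds the concrete *openai.Client, the other the project's llm.LLMClient abstraction, which would allow a mock LLM to be injected in tests. The exact interface is not shown in this diff; a minimal sketch of the idea might look like:

	// Hypothetical shape, for illustration only; not taken from the source.
	type LLMClient interface {
		CreateChatCompletion(ctx context.Context, req openai.ChatCompletionRequest) (openai.ChatCompletionResponse, error)
	}
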
@@ -9,16 +9,16 @@
|
||||
"react-dom": "^19.1.0",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.28.0",
|
||||
"@types/react": "^19.1.6",
|
||||
"@types/react-dom": "^19.1.5",
|
||||
"@vitejs/plugin-react": "^4.5.0",
|
||||
"eslint": "^9.28.0",
|
||||
"@eslint/js": "^9.25.1",
|
||||
"@types/react": "^19.1.2",
|
||||
"@types/react-dom": "^19.1.3",
|
||||
"@vitejs/plugin-react": "^4.4.1",
|
||||
"eslint": "^9.25.1",
|
||||
"eslint-plugin-react-hooks": "^6.0.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.20",
|
||||
"globals": "^16.2.0",
|
||||
"react-router-dom": "^7.6.2",
|
||||
"vite": "^6.3.5",
|
||||
"globals": "^16.0.0",
|
||||
"react-router-dom": "^7.5.3",
|
||||
"vite": "^6.3.3",
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -133,15 +133,15 @@
|
||||
|
||||
"@eslint/config-helpers": ["@eslint/config-helpers@0.2.1", "", {}, "sha512-RI17tsD2frtDu/3dmI7QRrD4bedNKPM08ziRYaC5AhkGrzIAJelm9kJU1TznK+apx6V+cqRz8tfpEeG3oIyjxw=="],
|
||||
|
||||
"@eslint/core": ["@eslint/core@0.14.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-qIbV0/JZr7iSDjqAc60IqbLdsj9GDt16xQtWD+B78d/HAlvysGdZZ6rpJHGAc2T0FQx1X6thsSPdnoiGKdNtdg=="],
|
||||
"@eslint/core": ["@eslint/core@0.13.0", "", { "dependencies": { "@types/json-schema": "^7.0.15" } }, "sha512-yfkgDw1KR66rkT5A8ci4irzDysN7FRpq3ttJolR88OqQikAWqwA8j5VZyas+vjyBNFIJ7MfybJ9plMILI2UrCw=="],
|
||||
|
||||
"@eslint/eslintrc": ["@eslint/eslintrc@3.3.1", "", { "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", "espree": "^10.0.1", "globals": "^14.0.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", "js-yaml": "^4.1.0", "minimatch": "^3.1.2", "strip-json-comments": "^3.1.1" } }, "sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="],
|
||||
|
||||
"@eslint/js": ["@eslint/js@9.28.0", "", {}, "sha512-fnqSjGWd/CoIp4EXIxWVK/sHA6DOHN4+8Ix2cX5ycOY7LG0UY8nHCU5pIp2eaE1Mc7Qd8kHspYNzYXT2ojPLzg=="],
|
||||
"@eslint/js": ["@eslint/js@9.25.1", "", {}, "sha512-dEIwmjntEx8u3Uvv+kr3PDeeArL8Hw07H9kyYxCjnM9pBjfEhk6uLXSchxxzgiwtRhhzVzqmUSDFBOi1TuZ7qg=="],
|
||||
|
||||
"@eslint/object-schema": ["@eslint/object-schema@2.1.6", "", {}, "sha512-RBMg5FRL0I0gs51M/guSAj5/e14VQ4tpZnQNWwuDT66P14I43ItmPfIZRhO9fUVIPOAQXU47atlywZ/czoqFPA=="],
|
||||
|
||||
"@eslint/plugin-kit": ["@eslint/plugin-kit@0.3.1", "", { "dependencies": { "@eslint/core": "^0.14.0", "levn": "^0.4.1" } }, "sha512-0J+zgWxHN+xXONWIyPWKFMgVuJoZuGiIFu8yxk7RJjxkzpGmyja5wRFqZIVtjDVOQpV+Rw0iOAjYPE2eQyjr0w=="],
|
||||
"@eslint/plugin-kit": ["@eslint/plugin-kit@0.2.8", "", { "dependencies": { "@eslint/core": "^0.13.0", "levn": "^0.4.1" } }, "sha512-ZAoA40rNMPwSm+AeHpCq8STiNAwzWLJuP8Xv4CHIc9wv/PSuExjMrmjfYNj682vW0OOiZ1HKxzvjQr9XZIisQA=="],
|
||||
|
||||
"@humanfs/core": ["@humanfs/core@0.19.1", "", {}, "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="],
|
||||
|
||||
@@ -161,8 +161,6 @@
|
||||
|
||||
"@jridgewell/trace-mapping": ["@jridgewell/trace-mapping@0.3.25", "", { "dependencies": { "@jridgewell/resolve-uri": "^3.1.0", "@jridgewell/sourcemap-codec": "^1.4.14" } }, "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ=="],
|
||||
|
||||
"@rolldown/pluginutils": ["@rolldown/pluginutils@1.0.0-beta.9", "", {}, "sha512-e9MeMtVWo186sgvFFJOPGy7/d2j2mZhLJIdVW0C/xDluuOvymEATqz6zKsP0ZmXGzQtqlyjz5sC1sYQUoJG98w=="],
|
||||
|
||||
"@rollup/rollup-android-arm-eabi": ["@rollup/rollup-android-arm-eabi@4.40.0", "", { "os": "android", "cpu": "arm" }, "sha512-+Fbls/diZ0RDerhE8kyC6hjADCXA1K4yVNlH0EYfd2XjyH0UGgzaQ8MlT0pCXAThfxv3QUAczHaL+qSv1E4/Cg=="],
|
||||
|
||||
"@rollup/rollup-android-arm64": ["@rollup/rollup-android-arm64@4.40.0", "", { "os": "android", "cpu": "arm64" }, "sha512-PPA6aEEsTPRz+/4xxAmaoWDqh67N7wFbgFUJGMnanCFs0TV99M0M8QhhaSCks+n6EbQoFvLQgYOGXxlMGQe/6w=="],
|
||||
@@ -215,11 +213,11 @@
|
||||
|
||||
"@types/json-schema": ["@types/json-schema@7.0.15", "", {}, "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="],
|
||||
|
||||
"@types/react": ["@types/react@19.1.6", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-JeG0rEWak0N6Itr6QUx+X60uQmN+5t3j9r/OVDtWzFXKaj6kD1BwJzOksD0FF6iWxZlbE1kB0q9vtnU2ekqa1Q=="],
|
||||
"@types/react": ["@types/react@19.1.2", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-oxLPMytKchWGbnQM9O7D67uPa9paTNxO7jVoNMXgkkErULBPhPARCfkKL9ytcIJJRGjbsVwW4ugJzyFFvm/Tiw=="],
|
||||
|
||||
"@types/react-dom": ["@types/react-dom@19.1.5", "", { "peerDependencies": { "@types/react": "^19.0.0" } }, "sha512-CMCjrWucUBZvohgZxkjd6S9h0nZxXjzus6yDfUb+xLxYM7VvjKNH1tQrE9GWLql1XoOP4/Ds3bwFqShHUYraGg=="],
|
||||
"@types/react-dom": ["@types/react-dom@19.1.3", "", { "peerDependencies": { "@types/react": "^19.0.0" } }, "sha512-rJXC08OG0h3W6wDMFxQrZF00Kq6qQvw0djHRdzl3U5DnIERz0MRce3WVc7IS6JYBwtaP/DwYtRRjVlvivNveKg=="],
|
||||
|
||||
"@vitejs/plugin-react": ["@vitejs/plugin-react@4.5.0", "", { "dependencies": { "@babel/core": "^7.26.10", "@babel/plugin-transform-react-jsx-self": "^7.25.9", "@babel/plugin-transform-react-jsx-source": "^7.25.9", "@rolldown/pluginutils": "1.0.0-beta.9", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0" } }, "sha512-JuLWaEqypaJmOJPLWwO335Ig6jSgC1FTONCWAxnqcQthLTK/Yc9aH6hr9z/87xciejbQcnP3GnA1FWUSWeXaeg=="],
|
||||
"@vitejs/plugin-react": ["@vitejs/plugin-react@4.4.1", "", { "dependencies": { "@babel/core": "^7.26.10", "@babel/plugin-transform-react-jsx-self": "^7.25.9", "@babel/plugin-transform-react-jsx-source": "^7.25.9", "@types/babel__core": "^7.20.5", "react-refresh": "^0.17.0" }, "peerDependencies": { "vite": "^4.2.0 || ^5.0.0 || ^6.0.0" } }, "sha512-IpEm5ZmeXAP/osiBXVVP5KjFMzbWOonMs0NaQQl+xYnUAcq4oHUBsF2+p4MgKWG4YMmFYJU8A6sxRPuowllm6w=="],
|
||||
|
||||
"acorn": ["acorn@8.14.1", "", { "bin": { "acorn": "bin/acorn" } }, "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg=="],
|
||||
|
||||
@@ -269,7 +267,7 @@
|
||||
|
||||
"escape-string-regexp": ["escape-string-regexp@4.0.0", "", {}, "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="],
|
||||
|
||||
"eslint": ["eslint@9.28.0", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.20.0", "@eslint/config-helpers": "^0.2.1", "@eslint/core": "^0.14.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.28.0", "@eslint/plugin-kit": "^0.3.1", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-ocgh41VhRlf9+fVpe7QKzwLj9c92fDiqOj8Y3Sd4/ZmVA4Btx4PlUYPq4pp9JDyupkf1upbEXecxL2mwNV7jPQ=="],
|
||||
"eslint": ["eslint@9.25.1", "", { "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.12.1", "@eslint/config-array": "^0.20.0", "@eslint/config-helpers": "^0.2.1", "@eslint/core": "^0.13.0", "@eslint/eslintrc": "^3.3.1", "@eslint/js": "9.25.1", "@eslint/plugin-kit": "^0.2.8", "@humanfs/node": "^0.16.6", "@humanwhocodes/module-importer": "^1.0.1", "@humanwhocodes/retry": "^0.4.2", "@types/estree": "^1.0.6", "@types/json-schema": "^7.0.15", "ajv": "^6.12.4", "chalk": "^4.0.0", "cross-spawn": "^7.0.6", "debug": "^4.3.2", "escape-string-regexp": "^4.0.0", "eslint-scope": "^8.3.0", "eslint-visitor-keys": "^4.2.0", "espree": "^10.3.0", "esquery": "^1.5.0", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^8.0.0", "find-up": "^5.0.0", "glob-parent": "^6.0.2", "ignore": "^5.2.0", "imurmurhash": "^0.1.4", "is-glob": "^4.0.0", "json-stable-stringify-without-jsonify": "^1.0.1", "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", "optionator": "^0.9.3" }, "peerDependencies": { "jiti": "*" }, "optionalPeers": ["jiti"], "bin": { "eslint": "bin/eslint.js" } }, "sha512-E6Mtz9oGQWDCpV12319d59n4tx9zOTXSTmc8BLVxBx+G/0RdM5MvEEJLU9c0+aleoePYYgVTOsRblx433qmhWQ=="],
|
||||
|
||||
"eslint-plugin-react-hooks": ["eslint-plugin-react-hooks@6.0.0", "", { "dependencies": { "@babel/core": "^7.24.4", "@babel/parser": "^7.24.4", "@babel/plugin-transform-private-methods": "^7.24.4", "hermes-parser": "^0.25.1", "zod": "^3.22.4", "zod-validation-error": "^3.0.3" }, "peerDependencies": { "eslint": "^3.0.0 || ^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0-0 || ^9.0.0" } }, "sha512-NyC3yIC9fazLitYiN8eHykV5wLp/SMuUZMh+sdPSHIeN4ReXIc7if40jtGjDplAgVL/4OkN1d9gneWe9lFZgag=="],
|
||||
|
||||
@@ -311,7 +309,7 @@
|
||||
|
||||
"glob-parent": ["glob-parent@6.0.2", "", { "dependencies": { "is-glob": "^4.0.3" } }, "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="],
|
||||
|
||||
"globals": ["globals@16.2.0", "", {}, "sha512-O+7l9tPdHCU320IigZZPj5zmRCFG9xHmx9cU8FqU2Rp+JN714seHV+2S9+JslCpY4gJwU2vOGox0wzgae/MCEg=="],
|
||||
"globals": ["globals@16.0.0", "", {}, "sha512-iInW14XItCXET01CQFqudPOWP2jYMl7T+QRQT+UNcR/iQncN/F0UNpgd76iFkBPgNQb4+X3LV9tLJYzwh+Gl3A=="],
|
||||
|
||||
"has-flag": ["has-flag@4.0.0", "", {}, "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="],
|
||||
|
||||
@@ -395,9 +393,9 @@
|
||||
|
||||
"react-refresh": ["react-refresh@0.17.0", "", {}, "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ=="],
|
||||
|
||||
"react-router": ["react-router@7.6.2", "", { "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" }, "optionalPeers": ["react-dom"] }, "sha512-U7Nv3y+bMimgWjhlT5CRdzHPu2/KVmqPwKUCChW8en5P3znxUqwlYFlbmyj8Rgp1SF6zs5X4+77kBVknkg6a0w=="],
|
||||
"react-router": ["react-router@7.5.3", "", { "dependencies": { "cookie": "^1.0.1", "set-cookie-parser": "^2.6.0", "turbo-stream": "2.4.0" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" }, "optionalPeers": ["react-dom"] }, "sha512-3iUDM4/fZCQ89SXlDa+Ph3MevBrozBAI655OAfWQlTm9nBR0IKlrmNwFow5lPHttbwvITZfkeeeZFP6zt3F7pw=="],
|
||||
|
||||
"react-router-dom": ["react-router-dom@7.6.2", "", { "dependencies": { "react-router": "7.6.2" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, "sha512-Q8zb6VlTbdYKK5JJBLQEN06oTUa/RAbG/oQS1auK1I0TbJOXktqm+QENEVJU6QvWynlXPRBXI3fiOQcSEA78rA=="],
|
||||
"react-router-dom": ["react-router-dom@7.5.3", "", { "dependencies": { "react-router": "7.5.3" }, "peerDependencies": { "react": ">=18", "react-dom": ">=18" } }, "sha512-cK0jSaTyW4jV9SRKAItMIQfWZ/D6WEZafgHuuCb9g+SjhLolY78qc+De4w/Cz9ybjvLzShAmaIMEXt8iF1Cm+A=="],
|
||||
|
||||
"resolve-from": ["resolve-from@4.0.0", "", {}, "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="],
|
||||
|
||||
@@ -421,13 +419,15 @@
|
||||
|
||||
"tinyglobby": ["tinyglobby@0.2.13", "", { "dependencies": { "fdir": "^6.4.4", "picomatch": "^4.0.2" } }, "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw=="],
|
||||
|
||||
"turbo-stream": ["turbo-stream@2.4.0", "", {}, "sha512-FHncC10WpBd2eOmGwpmQsWLDoK4cqsA/UT/GqNoaKOQnT8uzhtCbg3EoUDMvqpOSAI0S26mr0rkjzbOO6S3v1g=="],
|
||||
|
||||
"type-check": ["type-check@0.4.0", "", { "dependencies": { "prelude-ls": "^1.2.1" } }, "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="],
|
||||
|
||||
"update-browserslist-db": ["update-browserslist-db@1.1.3", "", { "dependencies": { "escalade": "^3.2.0", "picocolors": "^1.1.1" }, "peerDependencies": { "browserslist": ">= 4.21.0" }, "bin": { "update-browserslist-db": "cli.js" } }, "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw=="],
|
||||
|
||||
"uri-js": ["uri-js@4.4.1", "", { "dependencies": { "punycode": "^2.1.0" } }, "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="],
|
||||
|
||||
"vite": ["vite@6.3.5", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-cZn6NDFE7wdTpINgs++ZJ4N49W2vRp8LCKrn3Ob1kYNtOo21vfDoaV5GzBfLU4MovSAB8uNRm4jgzVQZ+mBzPQ=="],
|
||||
"vite": ["vite@6.3.3", "", { "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", "picomatch": "^4.0.2", "postcss": "^8.5.3", "rollup": "^4.34.9", "tinyglobby": "^0.2.13" }, "optionalDependencies": { "fsevents": "~2.3.3" }, "peerDependencies": { "@types/node": "^18.0.0 || ^20.0.0 || >=22.0.0", "jiti": ">=1.21.0", "less": "*", "lightningcss": "^1.21.0", "sass": "*", "sass-embedded": "*", "stylus": "*", "sugarss": "*", "terser": "^5.16.0", "tsx": "^4.8.1", "yaml": "^2.4.2" }, "optionalPeers": ["@types/node", "jiti", "less", "lightningcss", "sass", "sass-embedded", "stylus", "sugarss", "terser", "tsx", "yaml"], "bin": { "vite": "bin/vite.js" } }, "sha512-5nXH+QsELbFKhsEfWLkHrvgRpTdGJzqOZ+utSdmPTvwHmvU6ITTm3xx+mRusihkcI8GeC7lCDyn3kDtiki9scw=="],
|
||||
|
||||
"which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
|
||||
|
||||
|
||||
@@ -15,15 +15,15 @@
|
||||
"highlight.js": "^11.11.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@eslint/js": "^9.28.0",
|
||||
"@types/react": "^19.1.6",
|
||||
"@types/react-dom": "^19.1.5",
|
||||
"@vitejs/plugin-react": "^4.5.0",
|
||||
"eslint": "^9.28.0",
|
||||
"@eslint/js": "^9.25.1",
|
||||
"@types/react": "^19.1.2",
|
||||
"@types/react-dom": "^19.1.3",
|
||||
"@vitejs/plugin-react": "^4.4.1",
|
||||
"eslint": "^9.25.1",
|
||||
"eslint-plugin-react-hooks": "^6.0.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.20",
|
||||
"globals": "^16.2.0",
|
||||
"react-router-dom": "^7.6.2",
|
||||
"vite": "^6.3.5"
|
||||
"globals": "^16.0.0",
|
||||
"react-router-dom": "^7.5.3",
|
||||
"vite": "^6.3.3"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,11 @@
package types

import (
	"encoding/json"

	"github.com/mudler/LocalAGI/pkg/xlog"
	"github.com/sashabaranov/go-openai"
)
import "github.com/sashabaranov/go-openai"

// RequestBody represents the request body structure for the OpenAI API
type RequestBody struct {
	Model         string          `json:"model"`
	Input         json.RawMessage `json:"input"`
	Input         interface{}     `json:"input"`
	InputText     string          `json:"input_text"`
	InputMessages []InputMessage  `json:"input_messages"`
	Include       []string        `json:"include,omitempty"`
@@ -31,34 +26,17 @@ type RequestBody struct {
}

func (r *RequestBody) SetInputByType() {
	xlog.Debug("[Parse Request] Set input type", "input", string(r.Input))

	var inputText string
	if err := json.Unmarshal(r.Input, &inputText); err == nil {
		r.InputText = inputText
		return
	}

	var inputMessages []InputMessage
	if err := json.Unmarshal(r.Input, &inputMessages); err != nil {
		xlog.Warn("[Parse Request] Input type not recognized", "input", string(r.Input))
		return
	}

	for _, i := range inputMessages {
		switch content := i.Content.(type) {
		case []ContentItem:
			i.ContentItems = content
		case string:
			i.ContentText = content
		default:
			xlog.Warn("[Parse Request] Input content type not recognized", "content", content)
	switch input := r.Input.(type) {
	case string:
		r.InputText = input
	case []any:
		for _, i := range input {
			switch i := i.(type) {
			case InputMessage:
				r.InputMessages = append(r.InputMessages, i)
			}
		}

		r.InputMessages = append(r.InputMessages, i)
	}

	xlog.Debug("[Parse Request] Input messages parsed", "messages", r.InputMessages)
}
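
SetInputByType accepts the request's "input" field in two shapes: a plain string, or an array of messages whose content is either a string or a list of content items. Illustrative payloads (model name and the JSON keys inside content items are assumptions, since the ContentItem tags are not shown in this hunk):

	{"model": "gpt-4o", "input": "Hello there"}

	{"model": "gpt-4o", "input": [{"role": "user", "content": [{"type": "text", "text": "Hello there"}]}]}
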
func (r *RequestBody) ToChatCompletionMessages() []openai.ChatCompletionMessage {
@@ -67,15 +45,7 @@ func (r *RequestBody) ToChatCompletionMessages() []openai.ChatCompletionMessage
	for _, m := range r.InputMessages {
		content := []openai.ChatMessagePart{}
		oneImageWasFound := false

		if m.ContentText != "" {
			content = append(content, openai.ChatMessagePart{
				Type: "text",
				Text: m.ContentText,
			})
		}

		for _, c := range m.ContentItems {
		for _, c := range m.Content {
			switch c.Type {
			case "text":
				content = append(content, openai.ChatMessagePart{
@@ -192,10 +162,8 @@ type ResponseBody struct {

// InputMessage represents a user input message
type InputMessage struct {
	Role         string        `json:"role"`
	Content      any           `json:"content"`
	ContentText  string        `json:"content_text"`
	ContentItems []ContentItem `json:"content_items"`
	Role         string        `json:"role"`
	Content      []ContentItem `json:"content"`
}

// ContentItem represents an item in a content array