(modified file; name not shown in this extract)

```diff
@@ -1,2 +1,3 @@
 models/
-db/
+data/
+volumes/
```
`.env` (deleted, 26 lines):

```
# Enable debug mode in the LocalAI API
DEBUG=true

# Where models are stored
MODELS_PATH=/models

# Galleries to use
GALLERIES=[{"name":"model-gallery", "url":"github:go-skynet/model-gallery/index.yaml"}, {"url": "github:go-skynet/model-gallery/huggingface.yaml","name":"huggingface"}]

# Select model configuration in the config directory
#PRELOAD_MODELS_CONFIG=/config/wizardlm-13b.yaml
PRELOAD_MODELS_CONFIG=/config/wizardlm-13b.yaml
#PRELOAD_MODELS_CONFIG=/config/wizardlm-13b-superhot.yaml

# You don't need to put a valid OpenAI key; however, the Python libraries expect
# the string to be set or they panic
OPENAI_API_KEY=sk---

# Set the OpenAI API base URL to point to LocalAI
DEFAULT_API_BASE=http://api:8080

# Set an image path
IMAGE_PATH=/tmp

# Set number of default threads
THREADS=14
```
`.github/workflows/goreleaser.yml` (new file, 32 lines):

```yaml
name: goreleaser

on:
  push:
    tags:
      - 'v*' # Trigger the workflow on tag pushes that match 'v*'

permissions:
  id-token: write
  contents: read

jobs:
  goreleaser:
    permissions:
      contents: write
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Set up Go
        uses: actions/setup-go@v5
        with:
          go-version: 1.22
      - name: Run GoReleaser
        uses: goreleaser/goreleaser-action@v6
        with:
          version: '~> v2'
          args: release --clean
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
`.github/workflows/image.yaml` (deleted, 142 lines):

```yaml
---
name: 'build container images'

on:
  pull_request:
  push:
    branches:
      - main

jobs:
  localagi:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Prepare
        id: prep
        run: |
          DOCKER_IMAGE=quay.io/go-skynet/localagi
          VERSION=main
          SHORTREF=${GITHUB_SHA::8}

          # If this is git tag, use the tag name as a docker tag
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            VERSION=${GITHUB_REF#refs/tags/}
          fi
          TAGS="${DOCKER_IMAGE}:${VERSION},${DOCKER_IMAGE}:${SHORTREF}"

          # If the VERSION looks like a version number, assume that
          # this is the most recent version of the image and also
          # tag it 'latest'.
          if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
            TAGS="$TAGS,${DOCKER_IMAGE}:latest"
          fi

          # Set output parameters.
          echo ::set-output name=tags::${TAGS}
          echo ::set-output name=docker_image::${DOCKER_IMAGE}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@master
        with:
          platforms: all

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@master

      - name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_PASSWORD }}

      - name: Build
        if: github.event_name != 'pull_request'
        uses: docker/build-push-action@v4
        with:
          builder: ${{ steps.buildx.outputs.name }}
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
          push: true
          tags: ${{ steps.prep.outputs.tags }}

      - name: Build PRs
        if: github.event_name == 'pull_request'
        uses: docker/build-push-action@v4
        with:
          builder: ${{ steps.buildx.outputs.name }}
          context: .
          file: ./Dockerfile
          platforms: linux/amd64
          push: false
          tags: ${{ steps.prep.outputs.tags }}

  discord-localagi:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Prepare
        id: prep
        run: |
          DOCKER_IMAGE=quay.io/go-skynet/localagi-discord
          VERSION=main
          SHORTREF=${GITHUB_SHA::8}

          # If this is git tag, use the tag name as a docker tag
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            VERSION=${GITHUB_REF#refs/tags/}
          fi
          TAGS="${DOCKER_IMAGE}:${VERSION},${DOCKER_IMAGE}:${SHORTREF}"

          # If the VERSION looks like a version number, assume that
          # this is the most recent version of the image and also
          # tag it 'latest'.
          if [[ $VERSION =~ ^v[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
            TAGS="$TAGS,${DOCKER_IMAGE}:latest"
          fi

          # Set output parameters.
          echo ::set-output name=tags::${TAGS}
          echo ::set-output name=docker_image::${DOCKER_IMAGE}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@master
        with:
          platforms: all

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@master

      - name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          registry: quay.io
          username: ${{ secrets.QUAY_USERNAME }}
          password: ${{ secrets.QUAY_PASSWORD }}

      - name: Build
        if: github.event_name != 'pull_request'
        uses: docker/build-push-action@v4
        with:
          builder: ${{ steps.buildx.outputs.name }}
          context: ./examples/discord
          file: ./examples/discord/Dockerfile
          platforms: linux/amd64
          push: true
          tags: ${{ steps.prep.outputs.tags }}

      - name: Build PRs
        if: github.event_name == 'pull_request'
        uses: docker/build-push-action@v4
        with:
          builder: ${{ steps.buildx.outputs.name }}
          context: ./examples/discord
          file: ./examples/discord/Dockerfile
          platforms: linux/amd64
          push: false
          tags: ${{ steps.prep.outputs.tags }}
```
`.github/workflows/image.yml` (new file, 86 lines):

```yaml
---
name: 'build container images'
on:
  push:
    branches:
      - master
    tags:
      - '*'
concurrency:
  group: ci-image-${{ github.head_ref || github.ref }}-${{ github.repository }}
  cancel-in-progress: true
jobs:
  containerImages:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Prepare
        id: prep
        run: |
          DOCKER_IMAGE=quay.io/mudler/localagi
          # Use branch name as default
          VERSION=${GITHUB_REF#refs/heads/}
          BINARY_VERSION=$(git describe --always --tags --dirty)
          SHORTREF=${GITHUB_SHA::8}
          # If this is git tag, use the tag name as a docker tag
          if [[ $GITHUB_REF == refs/tags/* ]]; then
            VERSION=${GITHUB_REF#refs/tags/}
          fi
          TAGS="${DOCKER_IMAGE}:${VERSION},${DOCKER_IMAGE}:${SHORTREF}"
          # If the VERSION looks like a version number, assume that
          # this is the most recent version of the image and also
          # tag it 'latest'.
          if [[ $VERSION =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
            TAGS="$TAGS,${DOCKER_IMAGE}:latest"
          fi
          # Set output parameters.
          echo ::set-output name=binary_version::${BINARY_VERSION}
          echo ::set-output name=tags::${TAGS}
          echo ::set-output name=docker_image::${DOCKER_IMAGE}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@master
        with:
          platforms: all

      - name: Set up Docker Buildx
        id: buildx
        uses: docker/setup-buildx-action@master

      - name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v3
        with:
          registry: quay.io
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@2a4836ac76fe8f5d0ee3a0d89aa12a80cc552ad3
        with:
          images: quay.io/mudler/localagi
          tags: |
            type=ref,event=branch,suffix=-{{date 'YYYYMMDDHHmmss'}}
            type=semver,pattern={{raw}}
            type=sha,suffix=-{{date 'YYYYMMDDHHmmss'}}
            type=ref,event=branch
          flavor: |
            latest=auto
            prefix=
            suffix=

      - name: Build
        uses: docker/build-push-action@v6
        with:
          builder: ${{ steps.buildx.outputs.name }}
          build-args: |
            VERSION=${{ steps.prep.outputs.binary_version }}
          context: ./
          file: ./Dockerfile.webui
          #platforms: linux/amd64,linux/arm64
          platforms: linux/amd64
          push: true
          #tags: ${{ steps.prep.outputs.tags }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
```
`.github/workflows/tests.yml` (new file, 50 lines):

```yaml
name: Run Go Tests

on:
  push:
    branches:
      - '**'
  pull_request:
    branches:
      - '**'
concurrency:
  group: ci-tests-${{ github.head_ref || github.ref }}-${{ github.repository }}
  cancel-in-progress: true
jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - run: |
          # Add Docker's official GPG key:
          sudo apt-get update
          sudo apt-get install ca-certificates curl
          sudo install -m 0755 -d /etc/apt/keyrings
          sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
          sudo chmod a+r /etc/apt/keyrings/docker.asc

          # Add the repository to Apt sources:
          echo \
            "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
            $(. /etc/os-release && echo "${UBUNTU_CODENAME:-$VERSION_CODENAME}") stable" | \
            sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
          sudo apt-get update
          sudo apt-get install -y docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
          docker version

          docker run --rm hello-world
      - uses: actions/setup-go@v5
        with:
          go-version: '>=1.17.0'
      - name: Run tests
        run: |
          sudo apt-get update && sudo apt-get install -y make
          make tests
          #sudo mv coverage/coverage.txt coverage.txt
          #sudo chmod 777 coverage.txt

      # - name: Upload coverage to Codecov
      #   uses: codecov/codecov-action@v4
      #   with:
      #     token: ${{ secrets.CODECOV_TOKEN }}
```
`.gitignore` (modified):

```diff
@@ -1,4 +1,10 @@
-db/
 models/
-config.ini
-.dockerenv
+data/
+pool
+uploads/
+local-agent-framework
+localagi
+LocalAGI
+**/.env
+.vscode
+volumes/
```
`.goreleaser.yml` (new file, 40 lines):

```yaml
# Make sure to check the documentation at http://goreleaser.com
version: 2
builds:
  - main: ./
    id: "localagi"
    binary: localagi
    ldflags:
      - -w -s
    # - -X github.com/internal.Version={{.Tag}}
    # - -X github.com/internal.Commit={{.Commit}}
    env:
      - CGO_ENABLED=0
    goos:
      - linux
      - windows
      - darwin
      - freebsd
    goarch:
      - amd64
      - arm
      - arm64
source:
  enabled: true
  name_template: '{{ .ProjectName }}-{{ .Tag }}-source'
archives:
  # Default template uses underscores instead of -
  - name_template: >-
      {{ .ProjectName }}-{{ .Tag }}-
      {{- if eq .Os "freebsd" }}FreeBSD
      {{- else }}{{- title .Os }}{{end}}-
      {{- if eq .Arch "amd64" }}x86_64
      {{- else if eq .Arch "386" }}i386
      {{- else }}{{ .Arch }}{{end}}
      {{- if .Arm }}v{{ .Arm }}{{ end }}
checksum:
  name_template: '{{ .ProjectName }}-{{ .Tag }}-checksums.txt'
snapshot:
  name_template: "{{ .Tag }}-next"
changelog:
  use: github-native
```
`Dockerfile` (deleted, 18 lines):

```dockerfile
FROM python:3.10-bullseye
WORKDIR /app
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

ENV DEBIAN_FRONTEND noninteractive

# Install package dependencies
RUN apt-get update -y && \
    apt-get install -y --no-install-recommends \
    alsa-utils \
    libsndfile1-dev && \
    apt-get clean

COPY . /app
RUN pip install .
ENTRYPOINT [ "python", "./main.py" ]
```
`Dockerfile.realtimesst` (new file, 12 lines):

```dockerfile
# python
FROM python:3.10-slim

ENV DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y python3-dev portaudio19-dev ffmpeg build-essential

RUN pip install RealtimeSTT

#COPY ./example/realtimesst /app
# https://github.com/KoljaB/RealtimeSTT/blob/master/RealtimeSTT_server/README.md#server-usage
ENTRYPOINT ["stt-server"]
#ENTRYPOINT [ "/app/main.py" ]
```
`Dockerfile.webui` (new file, 58 lines; the `LDFLAGS` build argument is quoted and re-declared inside the Go stage here, since an `ARG` defined before the first `FROM` is otherwise not visible to later instructions):

```dockerfile
# Define argument for linker flags
ARG LDFLAGS="-s -w"

# Use Bun container for building the React UI
FROM oven/bun:1 as ui-builder

# Set the working directory for the React UI
WORKDIR /app

# Copy package.json and bun.lockb (if exists)
COPY webui/react-ui/package.json webui/react-ui/bun.lockb* ./

# Install dependencies
RUN bun install --frozen-lockfile

# Copy the rest of the React UI source code
COPY webui/react-ui/ ./

# Build the React UI
RUN bun run build

# Use a temporary build image based on Golang 1.22-alpine
FROM golang:1.22-alpine as builder

# Re-declare the build arg inside this stage so it is visible to ENV
ARG LDFLAGS

# Set environment variables: linker flags and disable CGO
ENV LDFLAGS=$LDFLAGS CGO_ENABLED=0

# Install git
RUN apk add --no-cache git
RUN rm -rf /tmp/* /var/cache/apk/*

# Set the working directory
WORKDIR /work

# Copy go.mod and go.sum files first to leverage Docker cache
COPY go.mod go.sum ./

# Download dependencies - this layer will be cached as long as go.mod and go.sum don't change
RUN go mod download

# Now copy the rest of the source code
COPY . .

# Copy the built React UI from the ui-builder stage
COPY --from=ui-builder /app/dist /work/webui/react-ui/dist

# Build the application
RUN go build -ldflags="$LDFLAGS" -o localagi ./

FROM scratch

# Copy the webui binary from the builder stage to the final image
COPY --from=builder /work/localagi /localagi
COPY --from=builder /etc/ssl/ /etc/ssl/
COPY --from=builder /tmp /tmp

# Define the command that will be run when the container is started
ENTRYPOINT ["/localagi"]
```
`LICENSE` (modified):

```diff
@@ -1,6 +1,6 @@
 MIT License
 
-Copyright (c) 2023 Ettore Di Giacinto
+Copyright (c) 2023-2025 Ettore Di Giacinto (mudler@localai.io)
 
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
```
`Makefile` (new file, 32 lines):

```makefile
GOCMD?=go
IMAGE_NAME?=webui
ROOT_DIR:=$(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))

prepare-tests:
	docker compose up -d

cleanup-tests:
	docker compose down

tests: prepare-tests
	LOCALAGI_MODEL="arcee-agent" LOCALAI_API_URL="http://localhost:8081" LOCALAGI_API_URL="http://localhost:8080" $(GOCMD) run github.com/onsi/ginkgo/v2/ginkgo --fail-fast -v -r ./...

run-nokb:
	$(MAKE) run KBDISABLEINDEX=true

webui/react-ui/dist:
	docker run --entrypoint /bin/bash -v $(ROOT_DIR):/app oven/bun:1 -c "cd /app/webui/react-ui && bun install && bun run build"

.PHONY: build
build: webui/react-ui/dist
	$(GOCMD) build -o localagi ./

.PHONY: run
run: webui/react-ui/dist
	$(GOCMD) run ./

build-image:
	docker build -t $(IMAGE_NAME) -f Dockerfile.webui .

image-push:
	docker push $(IMAGE_NAME)
```
`README.md` (rewritten, 184 lines replaced by 398; badge images and screenshots were stripped by the extraction, so the badges are shown as plain links):

````diff
@@ -1,184 +1,398 @@
+<p align="center">
+  <img src="https://github.com/user-attachments/assets/6958ffb3-31cf-441e-b99d-ce34ec6fc88f" alt="LocalAGI Logo" width="220"/>
+</p>
+
-<h1 align="center">
-  <br>
-  <img height="300" src="https://github.com/mudler/LocalAGI/assets/2420543/b69817ce-2361-4234-a575-8f578e159f33"> <br>
-  LocalAGI
-  <br>
-</h1>
+<h3 align="center"><em>Your AI. Your Hardware. Your Rules.</em></h3>
 
-[AutoGPT](https://github.com/Significant-Gravitas/Auto-GPT), [babyAGI](https://github.com/yoheinakajima/babyagi), ... and now LocalAGI!
+<div align="center">
 
-LocalAGI is a small 🤖 virtual assistant that you can run locally, made by the [LocalAI](https://github.com/go-skynet/LocalAI) author and powered by it.
+[Go Report Card](https://goreportcard.com/report/github.com/mudler/LocalAGI)
+[License: MIT](https://opensource.org/licenses/MIT)
+[GitHub Stars](https://github.com/mudler/LocalAGI/stargazers)
+[GitHub Issues](https://github.com/mudler/LocalAGI/issues)
 
-The goal is:
-- Keep it simple, hackable and easy to understand
-- No API keys needed, no cloud services needed, 100% local. Tailored for local use, yet still compatible with OpenAI.
-- Smart agent/virtual assistant that can do tasks
-- Small set of dependencies
-- Run with Docker/Podman/Containers
-- Rather than trying to do everything, provide a good starting point for other projects
+</div>
 
-Note: Be warned! It was hacked in a weekend, and it's just an experiment to see what can be done with local LLMs.
+We empower you to build AI Agents that you can run locally, without coding.
 
+**LocalAGI** is a powerful, self-hostable AI Agent platform designed for maximum privacy and flexibility. A complete drop-in replacement for OpenAI's Responses APIs with advanced agentic capabilities. No clouds. No data leaks. Just pure local AI that works on consumer-grade hardware (CPU and GPU).
 
-## 🚀 Features
+## 🛡️ Take Back Your Privacy
 
-- 🧠 LLM for intent detection
-- 🧠 Uses functions for actions
-- 📝 Write to long-term memory
-- 📖 Read from long-term memory
-- 🌐 Internet access for search
-- :card_file_box: Write files
-- 🔌 Plan steps to achieve a goal
-- 🤖 Avatar creation with Stable Diffusion
-- 🗨️ Conversational
-- 🗣️ Voice synthesis with TTS
+Are you tired of AI wrappers calling out to cloud APIs, risking your privacy? So were we.
 
-## Demo
+LocalAGI ensures your data stays exactly where you want it—on your hardware. No API keys, no cloud subscriptions, no compromise.
 
-Search on internet (interactive mode)
+## 🌟 Key Features
 
-https://github.com/mudler/LocalAGI/assets/2420543/23199ca3-7380-4efc-9fac-a6bc2b52bdb3
+- 🎛 **No-Code Agents**: Easy-to-configure multiple agents via Web UI.
+- 🖥 **Web-Based Interface**: Simple and intuitive agent management.
+- 🤖 **Advanced Agent Teaming**: Instantly create cooperative agent teams from a single prompt.
+- 📡 **Connectors Galore**: Built-in integrations with Discord, Slack, Telegram, GitHub Issues, and IRC.
+- 🛠 **Comprehensive REST API**: Seamless integration into your workflows. Every agent created will support the OpenAI Responses API out of the box.
+- 📚 **Short & Long-Term Memory**: Powered by [LocalRAG](https://github.com/mudler/LocalRAG).
+- 🧠 **Planning & Reasoning**: Agents intelligently plan, reason, and adapt.
+- 🔄 **Periodic Tasks**: Schedule tasks with cron-like syntax.
+- 💾 **Memory Management**: Control memory usage with options for long-term and summary memory.
+- 🖼 **Multimodal Support**: Ready for vision, text, and more.
+- 🔧 **Extensible Custom Actions**: Easily script dynamic agent behaviors in Go (interpreted, no compilation!).
+- 🛠 **Fully Customizable Models**: Use your own models or integrate seamlessly with [LocalAI](https://github.com/mudler/LocalAI).
 
-Plan a road trip (batch mode)
+## 🛠️ Quickstart
 
-https://github.com/mudler/LocalAGI/assets/2420543/9ba43b82-dec5-432a-bdb9-8318e7db59a4
-
-> Note: The demo is with a GPU and `30b` models size
-
-## :book: Quick start
-
-No frills, just run docker-compose and start chatting with your virtual assistant:
-
 ```bash
-# Modify the configuration
-# vim .env
-# first run (and pulling the container)
-docker-compose up
-# next runs
-docker-compose run -i --rm localagi
+# Clone the repository
+git clone https://github.com/mudler/LocalAGI
+cd LocalAGI
+
+# CPU setup
+docker compose -f docker-compose.yml up
+
+# GPU setup
+docker compose -f docker-compose.gpu.yml up
 ```
 
-## How to use it
+Access your agents at `http://localhost:3000`
 
-By default localagi starts in interactive mode
+## 🏆 Why Choose LocalAGI?
 
-### Examples
+- **✓ Ultimate Privacy**: No data ever leaves your hardware.
+- **✓ Flexible Model Integration**: Supports GGUF, GGML, and more thanks to [LocalAI](https://github.com/mudler/LocalAI).
+- **✓ Developer-Friendly**: Rich APIs and intuitive interfaces.
+- **✓ Effortless Setup**: Simple Docker compose setups and pre-built binaries.
+- **✓ Feature-Rich**: From planning to multimodal capabilities, connectors for Slack, MCP support, LocalAGI has it all.
 
-Road trip planner by limiting searching to internet to 3 results only:
+## 🌐 The Local Ecosystem
+
+LocalAGI is part of the powerful Local family of privacy-focused AI tools:
+
+- [**LocalAI**](https://github.com/mudler/LocalAI): Run Large Language Models locally.
+- [**LocalRAG**](https://github.com/mudler/LocalRAG): Retrieval-Augmented Generation with local storage.
+- [**LocalAGI**](https://github.com/mudler/LocalAGI): Deploy intelligent AI agents securely and privately.
+
+## 🌟 Screenshots
+
+### Powerful Web UI
+
+### Connectors Ready-to-Go
+
+<p align="center">
+<img src="https://github.com/user-attachments/assets/4171072f-e4bf-4485-982b-55d55086f8fc" alt="Telegram" width="60"/>
+<img src="https://github.com/user-attachments/assets/9235da84-0187-4f26-8482-32dcc55702ef" alt="Discord" width="220"/>
+<img src="https://github.com/user-attachments/assets/a88c3d88-a387-4fb5-b513-22bdd5da7413" alt="Slack" width="220"/>
+<img src="https://github.com/user-attachments/assets/d249cdf5-ab34-4ab1-afdf-b99e2db182d2" alt="IRC" width="220"/>
+<img src="https://github.com/user-attachments/assets/52c852b0-4b50-4926-9fa0-aa50613ac622" alt="GitHub" width="220"/>
+</p>
+
+## 📖 Full Documentation
+
+Explore detailed documentation including:
+- [Installation Options](#installation-options)
+- [REST API Documentation](#rest-api)
+- [Connector Configuration](#connectors)
+- [Agent Configuration](#agent-configuration-reference)
+
+### Environment Configuration
+
+| Variable | What It Does |
+|----------|--------------|
+| `LOCALAGI_MODEL` | Your go-to model |
+| `LOCALAGI_MULTIMODAL_MODEL` | Optional model for multimodal capabilities |
+| `LOCALAGI_LLM_API_URL` | OpenAI-compatible API server URL |
+| `LOCALAGI_LLM_API_KEY` | API authentication |
+| `LOCALAGI_TIMEOUT` | Request timeout settings |
+| `LOCALAGI_STATE_DIR` | Where state gets stored |
+| `LOCALAGI_LOCALRAG_URL` | LocalRAG connection |
+| `LOCALAGI_ENABLE_CONVERSATIONS_LOGGING` | Toggle conversation logs |
+| `LOCALAGI_API_KEYS` | A comma-separated list of API keys used for authentication |
+
+## Installation Options
+
+### Pre-Built Binaries
+
+Download ready-to-run binaries from the [Releases](https://github.com/mudler/LocalAGI/releases) page.
+
+### Source Build
+
+Requirements:
+- Go 1.20+
+- Git
+- Bun 1.2+
 
 ```bash
-docker-compose run -i --rm localagi \
-  --skip-avatar \
-  --subtask-context \
-  --postprocess \
-  --search-results 3 \
-  --prompt "prepare a plan for my roadtrip to san francisco"
+# Clone repo
+git clone https://github.com/mudler/LocalAGI.git
+cd LocalAGI
+
+# Build it
+cd webui/react-ui && bun i && bun run build
+cd ../..
+go build -o localagi
+
+# Run it
+./localagi
 ```
 
-Limit results of planning to 3 steps:
+### Development
+
+The development workflow is similar to the source build, but with additional steps for hot reloading of the frontend:
 
 ```bash
-docker-compose run -i --rm localagi \
-  --skip-avatar \
-  --subtask-context \
-  --postprocess \
-  --search-results 1 \
-  --prompt "do a plan for my roadtrip to san francisco" \
-  --plan-message "The assistant replies with a plan of 3 steps to answer the request with a list of subtasks with logical steps. The reasoning includes a self-contained, detailed and descriptive instruction to fulfill the task."
+# Clone repo
+git clone https://github.com/mudler/LocalAGI.git
+cd LocalAGI
+
+# Install dependencies and start the frontend development server
+cd webui/react-ui && bun i && bun run dev
 ```
 
-### Advanced
+Then, in a separate terminal:
 
-localagi has several options in the CLI to tweak the experience:
-
-- `--system-prompt` is the system prompt to use. If not specified, it will use none.
-- `--prompt` is the prompt to use for batch mode. If not specified, it will default to interactive mode.
-- `--interactive` is the interactive mode. When used with `--prompt` it will drop you in an interactive session after the first prompt is evaluated.
-- `--skip-avatar` will skip avatar creation. Useful if you want to run it in a headless environment.
-- `--re-evaluate` will re-evaluate if another action is needed or we have completed the user request.
-- `--postprocess` will postprocess the reasoning for analysis.
-- `--subtask-context` will include context in subtasks.
-- `--search-results` is the number of search results to use.
-- `--plan-message` is the message to use during planning. You can override the message, for example to force a plan to have a different message.
-- `--tts-api-base` is the TTS API base. Defaults to `http://api:8080`.
-- `--localai-api-base` is the LocalAI API base. Defaults to `http://api:8080`.
-- `--images-api-base` is the Images API base. Defaults to `http://api:8080`.
-- `--embeddings-api-base` is the Embeddings API base. Defaults to `http://api:8080`.
-- `--functions-model` is the functions model to use. Defaults to `functions`.
-- `--embeddings-model` is the embeddings model to use. Defaults to `all-MiniLM-L6-v2`.
-- `--llm-model` is the LLM model to use. Defaults to `gpt-4`.
-- `--tts-model` is the TTS model to use. Defaults to `en-us-kathleen-low.onnx`.
-- `--stablediffusion-model` is the Stable Diffusion model to use. Defaults to `stablediffusion`.
-- `--stablediffusion-prompt` is the Stable Diffusion prompt to use. Defaults to `DEFAULT_PROMPT`.
-- `--force-action` will force a specific action.
-- `--debug` will enable debug mode.
-
-### Customize
-
-To use a different model, you can see the examples in the `config` folder.
-To select a model, modify the `.env` file and change the `PRELOAD_MODELS_CONFIG` variable to use a different configuration file.
-
-### Caveats
-
-The "goodness" of a model has a big impact on how LocalAGI works. Currently `13b` models are powerful enough to actually be able to perform multi-step tasks or do more actions. However, they are quite slow when running on CPU (no big surprise here).
-
-The context size is a limitation - you can find examples in `config` to run with superhot 8k context size, but the quality is not good enough to perform complex tasks.
-
-## What is LocalAGI?
-
-It is a dead simple experiment to show how to tie the various LocalAI functionalities to create a virtual assistant that can do tasks. It is simple on purpose, trying to be minimalistic and easy to understand and customize for everyone.
-
-It is different from babyAGI or AutoGPT as it uses [LocalAI functions](https://localai.io/features/openai-functions/) - it is a from-scratch attempt built on purpose to run locally with [LocalAI](https://localai.io) (no API keys needed!) instead of expensive cloud services. It sets itself apart from other projects as it strives to be small and easy to fork.
-
-### How it works?
-
-`LocalAGI` just does the minimal around LocalAI functions to create a virtual assistant that can do generic tasks. It works by an endless loop of `intent detection`, `function invocation`, `self-evaluation` and `reply generation` (if it decides to reply! :)). The agent is capable of planning complex tasks by invoking multiple functions, and remembering things from the conversation.
-
-In a nutshell, it goes like this:
-
-- Decide based on the conversation history if it needs to take an action by using functions. It uses the LLM to detect the intent from the conversation.
-- If it needs to take an action (e.g. "remember something from the conversation") or generate complex tasks (executing a chain of functions to achieve a goal), it invokes the functions.
-- It re-evaluates if it needs to do any other action.
-- It returns the result back to the LLM to generate a reply for the user.
-
-Under the hood LocalAI converts functions to llama.cpp BNF grammars. While OpenAI fine-tuned a model to reply to functions, LocalAI constrains the LLM to follow grammars. This is a much more efficient way to do it, and it is also more flexible as you can define your own functions and grammars. For learning more about this, check out the [LocalAI documentation](https://localai.io/docs/llm) and my tweet that explains how it works under the hood: https://twitter.com/mudler_it/status/1675524071457533953.
-
-### Agent functions
-
-The intention of this project is to keep the agent minimal, so it can be built on top of or forked. The agent is capable of doing the following functions:
-- remember something from the conversation
-- recall something from the conversation
-- search something from the internet
-- plan a complex task by invoking multiple functions
-- write files to disk
-
-## Roadmap
-
-- [x] 100% Local, with Local AI. NO API KEYS NEEDED!
-- [x] Create a simple virtual assistant
-- [x] Make the virtual assistant do functions like store long-term memory and autonomously search between them when needed
-- [x] Create the assistant avatar with Stable Diffusion
-- [x] Give it a voice
-- [ ] Use weaviate instead of Chroma
-- [ ] Get voice input (push to talk or wakeword)
-- [ ] Make a REST API (OpenAI compliant?) so it can be plugged in by e.g. a third-party service
-- [x] Take a system prompt so it can act with a "character" (e.g. "answer in rick and morty style")
-
-## Development
-
-Run docker-compose with main.py checked-out:
-
 ```bash
-docker-compose run -v main.py:/app/main.py -i --rm localagi
+# Start development server
+cd ../.. && go run main.go
 ```
 
-## Notes
+> Note: see webui/react-ui/.vite.config.js for env vars that can be used to configure the backend URL
 
-- a 13b model is enough for doing contextualized research and search/retrieve memory
-- a 30b model is enough to generate a roadmap trip plan ( so cool! )
-- With superhot models it loses its magic, but it may be suitable for search
-- Context size is your enemy. `--postprocess` sometimes helps, but not always
-- It can be silly!
-- It is slow on CPU, don't expect `7b` models to perform well, and `13b` models perform better but on CPU are quite slow.
+## CONNECTORS
+
+Link your agents to the services you already use. Configuration examples below.
+
+### GitHub Issues
+
+```json
+{
+  "token": "YOUR_PAT_TOKEN",
+  "repository": "repo-to-monitor",
+  "owner": "repo-owner",
+  "botUserName": "bot-username"
+}
+```
+
+### Discord
+
+After [creating your Discord bot](https://discordpy.readthedocs.io/en/stable/discord.html):
+
+```json
+{
+  "token": "Bot YOUR_DISCORD_TOKEN",
+  "defaultChannel": "OPTIONAL_CHANNEL_ID"
+}
+```
+
+> Don't forget to enable "Message Content Intent" in the Bot tab settings!
+
+### Slack
+
+Use the included `slack.yaml` manifest to create your app, then configure:
+
+```json
+{
+  "botToken": "xoxb-your-bot-token",
+  "appToken": "xapp-your-app-token"
+}
+```
+
+- Create the OAuth bot token from "OAuth & Permissions" -> "OAuth Tokens for Your Workspace"
+- Create an app-level token from "Basic Information" -> "App-Level Tokens" (scopes: `connections:write`, `authorizations:read`)
+
+### Telegram
+
+Get a token from @botfather, then:
+
+```json
+{
+  "token": "your-bot-father-token"
+}
+```
+
+### IRC
+
+Connect to IRC networks:
+
+```json
+{
+  "server": "irc.example.com",
+  "port": "6667",
+  "nickname": "LocalAGIBot",
+  "channel": "#yourchannel",
+  "alwaysReply": "false"
+}
+```
+
+## REST API
+
+### Agent Management
+
+| Endpoint | Method | Description | Example |
+|----------|--------|-------------|---------|
+| `/api/agents` | GET | List all available agents | [Example](#get-all-agents) |
+| `/api/agent/:name/status` | GET | View agent status history | [Example](#get-agent-status) |
+| `/api/agent/create` | POST | Create a new agent | [Example](#create-agent) |
+| `/api/agent/:name` | DELETE | Remove an agent | [Example](#delete-agent) |
+| `/api/agent/:name/pause` | PUT | Pause agent activities | [Example](#pause-agent) |
+| `/api/agent/:name/start` | PUT | Resume a paused agent | [Example](#start-agent) |
+| `/api/agent/:name/config` | GET | Get agent configuration | |
+| `/api/agent/:name/config` | PUT | Update agent configuration | |
+| `/api/meta/agent/config` | GET | Get agent configuration metadata | |
+| `/settings/export/:name` | GET | Export agent config | [Example](#export-agent) |
+| `/settings/import` | POST | Import agent config | [Example](#import-agent) |
+
+### Actions and Groups
+
+| Endpoint | Method | Description | Example |
+|----------|--------|-------------|---------|
+| `/api/actions` | GET | List available actions | |
+| `/api/action/:name/run` | POST | Execute an action | |
+| `/api/agent/group/generateProfiles` | POST | Generate group profiles | |
+| `/api/agent/group/create` | POST | Create a new agent group | |
+
+### Chat Interactions
+
+| Endpoint | Method | Description | Example |
+|----------|--------|-------------|---------|
+| `/api/chat/:name` | POST | Send message & get response | [Example](#send-message) |
+| `/api/notify/:name` | POST | Send notification to agent | [Example](#notify-agent) |
+| `/api/sse/:name` | GET | Real-time agent event stream | [Example](#agent-sse-stream) |
+| `/v1/responses` | POST | Send message & get response | [OpenAI's Responses](https://platform.openai.com/docs/api-reference/responses/create) |
+
+<details>
+<summary><strong>Curl Examples</strong></summary>
+
+#### Get All Agents
+```bash
+curl -X GET "http://localhost:3000/api/agents"
+```
+
+#### Get Agent Status
+```bash
+curl -X GET "http://localhost:3000/api/agent/my-agent/status"
+```
+
+#### Create Agent
+```bash
+curl -X POST "http://localhost:3000/api/agent/create" \
+  -H "Content-Type: application/json" \
+  -d '{
+    "name": "my-agent",
+    "model": "gpt-4",
+    "system_prompt": "You are an AI assistant.",
+    "enable_kb": true,
+    "enable_reasoning": true
+  }'
+```
+
+#### Delete Agent
+```bash
+curl -X DELETE "http://localhost:3000/api/agent/my-agent"
+```
+
+#### Pause Agent
+```bash
+curl -X PUT "http://localhost:3000/api/agent/my-agent/pause"
+```
+
+#### Start Agent
+```bash
+curl -X PUT "http://localhost:3000/api/agent/my-agent/start"
+```
+
+#### Get Agent Configuration
+```bash
+curl -X GET "http://localhost:3000/api/agent/my-agent/config"
+```
+
+#### Update Agent Configuration
+```bash
+curl -X PUT "http://localhost:3000/api/agent/my-agent/config" \
+  -H "Content-Type: application/json" \
+  -d '{
+    "model": "gpt-4",
+    "system_prompt": "You are an AI assistant."
+  }'
+```
+
+#### Export Agent
+```bash
+curl -X GET "http://localhost:3000/settings/export/my-agent" --output my-agent.json
+```
+
+#### Import Agent
+```bash
+curl -X POST "http://localhost:3000/settings/import" \
+  -F "file=@/path/to/my-agent.json"
+```
+
+#### Send Message
+```bash
+curl -X POST "http://localhost:3000/api/chat/my-agent" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "Hello, how are you today?"}'
+```
+
+#### Notify Agent
+```bash
+curl -X POST "http://localhost:3000/api/notify/my-agent" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "Important notification"}'
+```
+
+#### Agent SSE Stream
+```bash
+curl -N -X GET "http://localhost:3000/api/sse/my-agent"
+```
+Note: For proper SSE handling, you should use a client that supports SSE natively.
+
+</details>
+
+### Agent Configuration Reference
+
+The agent configuration defines how an agent behaves and what capabilities it has. You can view the available configuration options and their descriptions by using the metadata endpoint:
+
+```bash
+curl -X GET "http://localhost:3000/api/meta/agent/config"
+```
+
+This will return a JSON object containing all available configuration fields, their types, and descriptions.
+
+Here's an example of the agent configuration structure:
+
+```json
+{
+  "name": "my-agent",
+  "model": "gpt-4",
+  "multimodal_model": "gpt-4-vision",
+  "hud": true,
+  "standalone_job": false,
+  "random_identity": false,
+  "initiate_conversations": true,
+  "enable_planning": true,
+  "identity_guidance": "You are a helpful assistant.",
+  "periodic_runs": "0 * * * *",
+  "permanent_goal": "Help users with their questions.",
+  "enable_kb": true,
+  "enable_reasoning": true,
+  "kb_results": 5,
+  "can_stop_itself": false,
+  "system_prompt": "You are an AI assistant.",
+  "long_term_memory": true,
+  "summary_long_term_memory": false
+}
+```
+
+## LICENSE
+
+MIT License — See the [LICENSE](LICENSE) file for details.
+
+---
+
+<p align="center">
+  <strong>LOCAL PROCESSING. GLOBAL THINKING.</strong><br>
+  Made with ❤️ by <a href="https://github.com/mudler">mudler</a>
+</p>
````
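The `/api/chat/:name` endpoint documented above takes a JSON body with a `message` field. A minimal Go client sketch for it; the response schema is not documented in this changeset, so the body is printed raw, and the agent name and message are placeholders:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// POST /api/chat/:name with {"message": "..."}, as documented above.
	payload, err := json.Marshal(map[string]string{"message": "Hello, how are you today?"})
	if err != nil {
		log.Fatal(err)
	}

	resp, err := http.Post("http://localhost:3000/api/chat/my-agent",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// The response shape is not specified here, so dump it as-is.
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(resp.Status, string(body))
}
```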
Six model configuration files were also deleted (their paths are not shown in this extract; the `.env` above referenced them via `PRELOAD_MODELS_CONFIG`).

Deleted config (45 lines):

```yaml
- id: huggingface@TheBloke/WizardLM-13B-V1.1-GGML/wizardlm-13b-v1.1.ggmlv3.q5_K_M.bin
  name: "gpt-4"
  overrides:
    context_size: 2048
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@TheBloke/WizardLM-13B-V1.1-GGML/wizardlm-13b-v1.1.ggmlv3.q5_K_M.bin
  name: functions
  overrides:
    context_size: 2048
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
    function:
      disable_no_action: true
    mmap: true
    f16: true
```

Deleted config (47 lines):

```yaml
- id: huggingface@TheBloke/WizardLM-13B-V1-0-Uncensored-SuperHOT-8K-GGML/wizardlm-13b-v1.0-superhot-8k.ggmlv3.q4_K_M.bin
  name: "gpt-4"
  overrides:
    context_size: 8192
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
      rope_freq_scale: 0.25
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@TheBloke/WizardLM-13B-V1-0-Uncensored-SuperHOT-8K-GGML/wizardlm-13b-v1.0-superhot-8k.ggmlv3.q4_K_M.bin
  name: functions
  overrides:
    context_size: 8192
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
      rope_freq_scale: 0.25
    function:
      disable_no_action: true
    mmap: true
    f16: true
```

Deleted config (45 lines):

```yaml
- id: huggingface@thebloke/wizardlm-13b-v1.0-uncensored-ggml/wizardlm-13b-v1.0-uncensored.ggmlv3.q4_k_m.bin
  name: "gpt-4"
  overrides:
    context_size: 2048
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@thebloke/wizardlm-13b-v1.0-uncensored-ggml/wizardlm-13b-v1.0-uncensored.ggmlv3.q4_0.bin
  name: functions
  overrides:
    context_size: 2048
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
    function:
      disable_no_action: true
    mmap: true
    f16: true
```

Deleted config (47 lines):

```yaml
- id: huggingface@TheBloke/WizardLM-Uncensored-SuperCOT-StoryTelling-30B-SuperHOT-8K-GGML/WizardLM-Uncensored-SuperCOT-StoryTelling-30b-superhot-8k.ggmlv3.q4_0.bin
  name: "gpt-4"
  overrides:
    context_size: 8192
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
      rope_freq_scale: 0.25
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@TheBloke/WizardLM-Uncensored-SuperCOT-StoryTelling-30B-SuperHOT-8K-GGML/WizardLM-Uncensored-SuperCOT-StoryTelling-30b-superhot-8k.ggmlv3.q4_0.bin
  name: functions
  overrides:
    context_size: 8192
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
      rope_freq_scale: 0.25
    function:
      disable_no_action: true
    mmap: true
    f16: true
```

Deleted config (46 lines):

```yaml
- id: huggingface@thebloke/wizardlm-30b-uncensored-ggml/wizardlm-30b-uncensored.ggmlv3.q2_k.bin
  galleryModel:
    name: "gpt-4"
  overrides:
    context_size: 4096
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@thebloke/wizardlm-30b-uncensored-ggml/wizardlm-30b-uncensored.ggmlv3.q2_k.bin
  name: functions
  overrides:
    context_size: 4096
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
    function:
      disable_no_action: true
    mmap: true
    f16: true
```

Deleted config (45 lines):

```yaml
- id: huggingface@thebloke/wizardlm-7b-v1.0-uncensored-ggml/wizardlm-7b-v1.0-uncensored.ggmlv3.q4_k_m.bin
  name: "gpt-4"
  overrides:
    context_size: 2048
    mmap: true
    f16: true
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
- id: model-gallery@stablediffusion
- id: model-gallery@voice-en-us-kathleen-low
- url: github:go-skynet/model-gallery/base.yaml
  name: all-MiniLM-L6-v2
  overrides:
    embeddings: true
    backend: huggingface-embeddings
    parameters:
      model: all-MiniLM-L6-v2
- id: huggingface@thebloke/wizardlm-7b-v1.0-uncensored-ggml/wizardlm-7b-v1.0-uncensored.ggmlv3.q4_0.bin
  name: functions
  overrides:
    context_size: 2048
    mirostat: 2
    mirostat_tau: 5.0
    mirostat_eta: 0.1
    template:
      chat: ""
      completion: ""
    roles:
      assistant: "ASSISTANT:"
      system: "SYSTEM:"
      assistant_function_call: "FUNCTION_CALL:"
      function: "FUNCTION CALL RESULT:"
    parameters:
      temperature: 0.1
      top_k: 40
      top_p: 0.95
    function:
      disable_no_action: true
    mmap: true
    f16: true
```
`core/action/action_suite_test.go` (new file, 13 lines):

```go
package action_test

import (
	"testing"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestAction(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Agent Action test suite")
}
```
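The suite bootstrap above only wires Gomega's fail handler into Ginkgo and starts the runner; the actual specs live in sibling files of the same package. A hypothetical minimal spec, only to illustrate the Ginkgo v2 shape that `RunSpecs` discovers (the assertion is a placeholder, not part of this changeset):

```go
package action_test

import (
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

// Top-level Describe blocks register specs at package init time,
// so RunSpecs in action_suite_test.go picks them up automatically.
var _ = Describe("custom action", func() {
	It("evaluates Gomega assertions", func() {
		Expect(1 + 1).To(Equal(2))
	})
})
```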
163 core/action/custom.go Normal file
@@ -0,0 +1,163 @@
package action

import (
	"context"
	"fmt"
	"strings"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/pkg/config"
	"github.com/mudler/LocalAGI/pkg/xlog"
	"github.com/sashabaranov/go-openai/jsonschema"
	"github.com/traefik/yaegi/interp"
	"github.com/traefik/yaegi/stdlib"
)

func NewCustom(config map[string]string, goPkgPath string) (*CustomAction, error) {
	a := &CustomAction{
		config:    config,
		goPkgPath: goPkgPath,
	}

	if err := a.initializeInterpreter(); err != nil {
		return nil, err
	}

	if err := a.callInit(); err != nil {
		xlog.Error("Error calling custom action init", "error", err)
	}

	return a, nil
}

type CustomAction struct {
	config    map[string]string
	goPkgPath string
	i         *interp.Interpreter
}

func (a *CustomAction) callInit() error {
	if a.i == nil {
		return nil
	}

	v, err := a.i.Eval(fmt.Sprintf("%s.Init", a.config["name"]))
	if err != nil {
		return err
	}

	run := v.Interface().(func() error)

	return run()
}

func (a *CustomAction) initializeInterpreter() error {
	if _, exists := a.config["code"]; exists && a.i == nil {
		unsafe := strings.ToLower(a.config["unsafe"]) == "true"
		i := interp.New(interp.Options{
			GoPath:       a.goPkgPath,
			Unrestricted: unsafe,
		})
		if err := i.Use(stdlib.Symbols); err != nil {
			return err
		}

		if _, exists := a.config["name"]; !exists {
			a.config["name"] = "custom"
		}

		_, err := i.Eval(fmt.Sprintf("package %s\n%s", a.config["name"], a.config["code"]))
		if err != nil {
			return err
		}

		a.i = i
	}

	return nil
}

func (a *CustomAction) Plannable() bool {
	return true
}

func (a *CustomAction) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
	v, err := a.i.Eval(fmt.Sprintf("%s.Run", a.config["name"]))
	if err != nil {
		return types.ActionResult{}, err
	}

	run := v.Interface().(func(map[string]interface{}) (string, map[string]interface{}, error))

	res, meta, err := run(params)
	return types.ActionResult{Result: res, Metadata: meta}, err
}

func (a *CustomAction) Definition() types.ActionDefinition {
	v, err := a.i.Eval(fmt.Sprintf("%s.Definition", a.config["name"]))
	if err != nil {
		xlog.Error("Error getting custom action definition", "error", err)
		return types.ActionDefinition{}
	}

	properties := v.Interface().(func() map[string][]string)

	v, err = a.i.Eval(fmt.Sprintf("%s.RequiredFields", a.config["name"]))
	if err != nil {
		xlog.Error("Error getting custom action definition", "error", err)
		return types.ActionDefinition{}
	}

	requiredFields := v.Interface().(func() []string)

	prop := map[string]jsonschema.Definition{}

	for k, v := range properties() {
		if len(v) != 2 {
			xlog.Error("Invalid property definition", "property", k)
			continue
		}
		prop[k] = jsonschema.Definition{
			Type:        jsonschema.DataType(v[0]),
			Description: v[1],
		}
	}
	return types.ActionDefinition{
		Name:        types.ActionDefinitionName(a.config["name"]),
		Description: a.config["description"],
		Properties:  prop,
		Required:    requiredFields(),
	}
}

func CustomConfigMeta() []config.Field {
	return []config.Field{
		{
			Name:     "name",
			Label:    "Action Name",
			Type:     config.FieldTypeText,
			Required: true,
			HelpText: "Name of the custom action",
		},
		{
			Name:     "description",
			Label:    "Description",
			Type:     config.FieldTypeTextarea,
			HelpText: "Description of the custom action",
		},
		{
			Name:     "code",
			Label:    "Code",
			Type:     config.FieldTypeTextarea,
			Required: true,
			HelpText: "Go code for the custom action",
		},
		{
			Name:     "unsafe",
			Label:    "Unsafe",
			Type:     config.FieldTypeCheckbox,
			HelpText: "Allow unsafe code execution",
		},
	}
}
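For reference, here is a hedged sketch of the Go snippet a user could supply in the "code" field above. Everything outside the four looked-up symbols (Init, Run, Definition, RequiredFields) is invented for illustration: initializeInterpreter wraps the snippet in a package named after the action's "name" setting, and Init is effectively optional since a failed lookup is only logged.

// Hypothetical "weather" custom action: the interpreter evaluates this
// snippet as `package weather` (the configured action name).

// Init runs once at load time; if it is missing, the lookup error is only logged.
func Init() error { return nil }

// Run receives the LLM-provided parameters and returns a result string plus metadata.
func Run(params map[string]interface{}) (string, map[string]interface{}, error) {
	city, _ := params["city"].(string)
	return "sunny in " + city, map[string]interface{}{}, nil
}

// Definition maps each parameter name to a [JSON-schema type, description] pair.
func Definition() map[string][]string {
	return map[string][]string{
		"city": {"string", "The city to look up"},
	}
}

// RequiredFields lists the mandatory parameters.
func RequiredFields() []string {
	return []string{"city"}
}

The test file that follows exercises exactly this contract end to end.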
87 core/action/custom_test.go Normal file
@@ -0,0 +1,87 @@
package action_test

import (
	"context"

	. "github.com/mudler/LocalAGI/core/action"
	"github.com/mudler/LocalAGI/core/types"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"

	"github.com/sashabaranov/go-openai/jsonschema"
)

var _ = Describe("Agent custom action", func() {
	Context("custom action", func() {
		It("initializes correctly", func() {

			testCode := `
import (
	"encoding/json"
)

type Params struct {
	Foo string
}

func Run(config map[string]interface{}) (string, map[string]interface{}, error) {
	p := Params{}
	b, err := json.Marshal(config)
	if err != nil {
		return "", map[string]interface{}{}, err
	}
	if err := json.Unmarshal(b, &p); err != nil {
		return "", map[string]interface{}{}, err
	}

	return p.Foo, map[string]interface{}{}, nil
}

func Definition() map[string][]string {
	return map[string][]string{
		"foo": []string{
			"string",
			"The foo value",
		},
	}
}

func RequiredFields() []string {
	return []string{"foo"}
}
`

			customAction, err := NewCustom(
				map[string]string{
					"code":        testCode,
					"name":        "test",
					"description": "A test action",
				},
				"",
			)
			Expect(err).ToNot(HaveOccurred())

			definition := customAction.Definition()
			Expect(definition).To(Equal(types.ActionDefinition{
				Properties: map[string]jsonschema.Definition{
					"foo": {
						Type:        jsonschema.String,
						Description: "The foo value",
					},
				},
				Required:    []string{"foo"},
				Name:        "test",
				Description: "A test action",
			}))

			runResult, err := customAction.Run(context.Background(), types.ActionParams{
				"Foo": "bar",
			})
			Expect(err).ToNot(HaveOccurred())
			Expect(runResult.Result).To(Equal("bar"))
		})
	})
})
50 core/action/intention.go Normal file
@@ -0,0 +1,50 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// NewIntention creates a new intention action.
// The intention action is special as it tries to identify
// a tool to use and the reasoning for using it
func NewIntention(s ...string) *IntentAction {
	return &IntentAction{tools: s}
}

type IntentAction struct {
	tools []string
}

type IntentResponse struct {
	Tool      string `json:"tool"`
	Reasoning string `json:"reasoning"`
}

func (a *IntentAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{}, nil
}

func (a *IntentAction) Plannable() bool {
	return false
}

func (a *IntentAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        "pick_tool",
		Description: "Pick a tool",
		Properties: map[string]jsonschema.Definition{
			"reasoning": {
				Type:        jsonschema.String,
				Description: "A detailed reasoning on why you want to call this tool.",
			},
			"tool": {
				Type:        jsonschema.String,
				Description: "The tool you want to use",
				Enum:        a.tools,
			},
		},
		Required: []string{"tool", "reasoning"},
	}
}
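All of the action files in this diff share the same method set. Below is a minimal sketch of the types.Action contract they appear to satisfy, inferred from the code above rather than copied from core/types (which is not part of this excerpt); the placeholder types are simplified stand-ins.

package types

import "context"

// Simplified placeholders; the real core/types definitions carry more
// fields and methods (e.g. an ActionDefinitionName type with Is()/String()).
type ActionParams map[string]interface{}

type ActionResult struct {
	Result   string
	Metadata map[string]interface{}
}

type ActionDefinition struct {
	Name        string
	Description string
}

// Action is the contract inferred from the shared method set above:
// every action runs, describes itself as a tool schema, and declares
// whether the planner may schedule it.
type Action interface {
	Run(ctx context.Context, params ActionParams) (ActionResult, error)
	Definition() ActionDefinition
	Plannable() bool
}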
42 core/action/newconversation.go Normal file
@@ -0,0 +1,42 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

const ConversationActionName = "new_conversation"

func NewConversation() *ConversationAction {
	return &ConversationAction{}
}

type ConversationAction struct{}

type ConversationActionResponse struct {
	Message string `json:"message"`
}

func (a *ConversationAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{}, nil
}

func (a *ConversationAction) Plannable() bool {
	return false
}

func (a *ConversationAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        ConversationActionName,
		Description: "Use this tool to initiate a new conversation or to notify something.",
		Properties: map[string]jsonschema.Definition{
			"message": {
				Type:        jsonschema.String,
				Description: "The message to start the conversation",
			},
		},
		Required: []string{"message"},
	}
}
32 core/action/noreply.go Normal file
@@ -0,0 +1,32 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
)

// StopActionName is the name of the action
// used by the LLM to stop any further action
const StopActionName = "stop"

func NewStop() *StopAction {
	return &StopAction{}
}

type StopAction struct{}

func (a *StopAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{}, nil
}

func (a *StopAction) Plannable() bool {
	return false
}

func (a *StopAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        StopActionName,
		Description: "Use this tool to stop any further action and stop the conversation. You must use this when it looks like there is a conclusion to the conversation or the topic diverged too much from the original conversation. For instance, if the user offers help and you have already replied with a message, you can use this tool to stop the conversation.",
	}
}
71 core/action/plan.go Normal file
@@ -0,0 +1,71 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// PlanActionName is the name of the plan action
// used by the LLM to schedule more actions
const PlanActionName = "plan"

func NewPlan(plannableActions []string) *PlanAction {
	return &PlanAction{
		plannables: plannableActions,
	}
}

type PlanAction struct {
	plannables []string
}

type PlanResult struct {
	Subtasks []PlanSubtask `json:"subtasks"`
	Goal     string        `json:"goal"`
}

type PlanSubtask struct {
	Action    string `json:"action"`
	Reasoning string `json:"reasoning"`
}

func (a *PlanAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{}, nil
}

func (a *PlanAction) Plannable() bool {
	return false
}

func (a *PlanAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        PlanActionName,
		Description: "Use this tool for solving complex tasks that involve calling more tools in sequence.",
		Properties: map[string]jsonschema.Definition{
			"subtasks": {
				Type:        jsonschema.Array,
				Description: "The subtasks to be executed",
				Items: &jsonschema.Definition{
					Type: jsonschema.Object,
					Properties: map[string]jsonschema.Definition{
						"action": {
							Type:        jsonschema.String,
							Description: "The action to call",
							Enum:        a.plannables,
						},
						"reasoning": {
							Type:        jsonschema.String,
							Description: "The reasoning for calling this action",
						},
					},
				},
			},
			"goal": {
				Type:        jsonschema.String,
				Description: "The goal of this plan",
			},
		},
		Required: []string{"subtasks", "goal"},
	}
}
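To make the plan tool call concrete, here is a hedged, self-contained sketch of arguments matching this schema decoding into PlanResult; the JSON payload and the action names inside it are invented for illustration.

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of action.PlanResult / action.PlanSubtask from the diff above.
type PlanSubtask struct {
	Action    string `json:"action"`
	Reasoning string `json:"reasoning"`
}

type PlanResult struct {
	Subtasks []PlanSubtask `json:"subtasks"`
	Goal     string        `json:"goal"`
}

func main() {
	// Hypothetical arguments an LLM might return for the "plan" tool.
	args := `{
		"goal": "summarize today's news",
		"subtasks": [
			{"action": "search", "reasoning": "find fresh articles"},
			{"action": "reply", "reasoning": "report the summary to the user"}
		]
	}`

	var plan PlanResult
	if err := json.Unmarshal([]byte(args), &plan); err != nil {
		panic(err)
	}
	// Prints: summarize today's news 2
	fmt.Println(plan.Goal, len(plan.Subtasks))
}

This is the same decode path handlePlanning takes in core/agent/actions.go further down, via actionParams.Unmarshal.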
43 core/action/reasoning.go Normal file
@@ -0,0 +1,43 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// NewReasoning creates a new reasoning action.
// The reasoning action is special as it tries to force the LLM
// to think about what to do next
func NewReasoning() *ReasoningAction {
	return &ReasoningAction{}
}

type ReasoningAction struct{}

type ReasoningResponse struct {
	Reasoning string `json:"reasoning"`
}

func (a *ReasoningAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{}, nil
}

func (a *ReasoningAction) Plannable() bool {
	return false
}

func (a *ReasoningAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        "pick_action",
		Description: "try to understand what's the best thing to do and pick an action with a reasoning",
		Properties: map[string]jsonschema.Definition{
			"reasoning": {
				Type:        jsonschema.String,
				Description: "A detailed reasoning on what you would do in this situation.",
			},
		},
		Required: []string{"reasoning"},
	}
}
45 core/action/reply.go Normal file
@@ -0,0 +1,45 @@
package action

import (
	"context"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// ReplyActionName is the name of the reply action
// used by the LLM to reply to the user without
// any additional processing
const ReplyActionName = "reply"

func NewReply() *ReplyAction {
	return &ReplyAction{}
}

type ReplyAction struct{}

type ReplyResponse struct {
	Message string `json:"message"`
}

func (a *ReplyAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{Result: "no-op"}, nil
}

func (a *ReplyAction) Plannable() bool {
	return false
}

func (a *ReplyAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        ReplyActionName,
		Description: "Use this tool to reply to the user once we have all the information we need.",
		Properties: map[string]jsonschema.Definition{
			"message": {
				Type:        jsonschema.String,
				Description: "The message to reply with",
			},
		},
		Required: []string{"message"},
	}
}
98 core/action/state.go Normal file
@@ -0,0 +1,98 @@
package action

import (
	"context"
	"fmt"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai/jsonschema"
)

const StateActionName = "update_state"

func NewState() *StateAction {
	return &StateAction{}
}

type StateAction struct{}

// AgentInternalState is the structure
// used to keep track of the current state
// and the agent's short-term memory, which it can update.
// Besides a long-term memory that is accessible by the agent (with a vector database),
// and a context memory (that is always powered by a vector database),
// this is the shorter memory that the LLM keeps across conversations and across its
// reasoning processes and lifetime.
// TODO: A special action is then used to let the LLM itself update its memory
// periodically during self-processing, and the same action is ALSO exposed
// during the conversation to let the user set, for example, a new goal for the agent.
type AgentInternalState struct {
	NowDoing    string   `json:"doing_now"`
	DoingNext   string   `json:"doing_next"`
	DoneHistory []string `json:"done_history"`
	Memories    []string `json:"memories"`
	Goal        string   `json:"goal"`
}

func (a *StateAction) Run(context.Context, types.ActionParams) (types.ActionResult, error) {
	return types.ActionResult{Result: "internal state has been updated"}, nil
}

func (a *StateAction) Plannable() bool {
	return false
}

func (a *StateAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        StateActionName,
		Description: "update the agent state (short memory) with the current state of the conversation.",
		Properties: map[string]jsonschema.Definition{
			"goal": {
				Type:        jsonschema.String,
				Description: "The current goal of the agent.",
			},
			"doing_next": {
				Type:        jsonschema.String,
				Description: "The next action the agent will do.",
			},
			"done_history": {
				Type: jsonschema.Array,
				Items: &jsonschema.Definition{
					Type: jsonschema.String,
				},
				Description: "A list of actions that the agent has done.",
			},
			"now_doing": {
				Type:        jsonschema.String,
				Description: "The current action the agent is doing.",
			},
			"memories": {
				Type: jsonschema.Array,
				Items: &jsonschema.Definition{
					Type: jsonschema.String,
				},
				Description: "A list of memories to keep between conversations.",
			},
		},
	}
}

const fmtT = `=====================
NowDoing: %s
DoingNext: %s
Your current goal is: %s
You have done: %+v
You have a short memory with: %+v
=====================
`

func (c AgentInternalState) String() string {
	return fmt.Sprintf(
		fmtT,
		c.NowDoing,
		c.DoingNext,
		c.Goal,
		c.DoneHistory,
		c.Memories,
	)
}
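A hedged sketch of "update_state" arguments decoding into the short-term memory struct; the JSON payload is invented. Note that the decode follows the struct's json tags (e.g. doing_now), while the tool schema above advertises the key as now_doing.

package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of action.AgentInternalState from the diff above.
type AgentInternalState struct {
	NowDoing    string   `json:"doing_now"`
	DoingNext   string   `json:"doing_next"`
	DoneHistory []string `json:"done_history"`
	Memories    []string `json:"memories"`
	Goal        string   `json:"goal"`
}

func main() {
	// Hypothetical tool-call arguments for "update_state".
	args := `{
		"goal": "keep the user's calendar tidy",
		"doing_now": "checking for conflicting events",
		"doing_next": "propose a reschedule",
		"done_history": ["listed events"],
		"memories": ["user prefers morning meetings"]
	}`

	var state AgentInternalState
	if err := json.Unmarshal([]byte(args), &state); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", state)
}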
467 core/agent/actions.go Normal file
@@ -0,0 +1,467 @@
package agent

import (
	"context"
	"encoding/json"
	"fmt"
	"os"

	"github.com/mudler/LocalAGI/core/action"
	"github.com/mudler/LocalAGI/core/types"

	"github.com/mudler/LocalAGI/pkg/xlog"

	"github.com/sashabaranov/go-openai"
)

type decisionResult struct {
	actionParams types.ActionParams
	message      string
	actioName    string
}

// decision forces the agent to take one of the available actions
func (a *Agent) decision(
	ctx context.Context,
	conversation []openai.ChatCompletionMessage,
	tools []openai.Tool, toolchoice any, maxRetries int) (*decisionResult, error) {

	var lastErr error
	for attempts := 0; attempts < maxRetries; attempts++ {
		decision := openai.ChatCompletionRequest{
			Model:      a.options.LLMAPI.Model,
			Messages:   conversation,
			Tools:      tools,
			ToolChoice: toolchoice,
		}

		resp, err := a.client.CreateChatCompletion(ctx, decision)
		if err != nil {
			lastErr = err
			xlog.Warn("Attempt to make a decision failed", "attempt", attempts+1, "error", err)
			continue
		}

		if len(resp.Choices) != 1 {
			lastErr = fmt.Errorf("no choices: %d", len(resp.Choices))
			xlog.Warn("Attempt to make a decision failed", "attempt", attempts+1, "error", lastErr)
			continue
		}

		msg := resp.Choices[0].Message
		if len(msg.ToolCalls) != 1 {
			if err := a.saveConversation(append(conversation, msg), "decision"); err != nil {
				xlog.Error("Error saving conversation", "error", err)
			}
			return &decisionResult{message: msg.Content}, nil
		}

		params := types.ActionParams{}
		if err := params.Read(msg.ToolCalls[0].Function.Arguments); err != nil {
			lastErr = err
			xlog.Warn("Attempt to parse action parameters failed", "attempt", attempts+1, "error", err)
			continue
		}

		if err := a.saveConversation(append(conversation, msg), "decision"); err != nil {
			xlog.Error("Error saving conversation", "error", err)
		}

		return &decisionResult{actionParams: params, actioName: msg.ToolCalls[0].Function.Name, message: msg.Content}, nil
	}

	return nil, fmt.Errorf("failed to make a decision after %d attempts: %w", maxRetries, lastErr)
}

type Messages []openai.ChatCompletionMessage

func (m Messages) ToOpenAI() []openai.ChatCompletionMessage {
	return []openai.ChatCompletionMessage(m)
}

func (m Messages) String() string {
	s := ""
	for _, cc := range m {
		s += cc.Role + ": " + cc.Content + "\n"
	}
	return s
}

func (m Messages) Exist(content string) bool {
	for _, cc := range m {
		if cc.Content == content {
			return true
		}
	}
	return false
}

func (m Messages) RemoveLastUserMessage() Messages {
	if len(m) == 0 {
		return m
	}

	for i := len(m) - 1; i >= 0; i-- {
		if m[i].Role == UserRole {
			return append(m[:i], m[i+1:]...)
		}
	}

	return m
}

func (m Messages) Save(path string) error {
	content, err := json.MarshalIndent(m, "", " ")
	if err != nil {
		return err
	}

	f, err := os.Create(path)
	if err != nil {
		return err
	}

	defer f.Close()

	if _, err := f.Write(content); err != nil {
		return err
	}

	return nil
}

func (m Messages) GetLatestUserMessage() *openai.ChatCompletionMessage {
	for i := len(m) - 1; i >= 0; i-- {
		msg := m[i]
		if msg.Role == UserRole {
			return &msg
		}
	}

	return nil
}

func (m Messages) IsLastMessageFromRole(role string) bool {
	if len(m) == 0 {
		return false
	}

	return m[len(m)-1].Role == role
}

func (a *Agent) generateParameters(ctx context.Context, pickTemplate string, act types.Action, c []openai.ChatCompletionMessage, reasoning string, maxAttempts int) (*decisionResult, error) {
	stateHUD, err := renderTemplate(pickTemplate, a.prepareHUD(), a.availableActions(), reasoning)
	if err != nil {
		return nil, err
	}

	conversation := c
	if !Messages(c).Exist(stateHUD) && a.options.enableHUD {
		conversation = append([]openai.ChatCompletionMessage{
			{
				Role:    "system",
				Content: stateHUD,
			},
		}, conversation...)
	}

	cc := conversation
	if a.options.forceReasoning {
		cc = append(conversation, openai.ChatCompletionMessage{
			Role:    "system",
			Content: fmt.Sprintf("The agent decided to use the tool %s with the following reasoning: %s", act.Definition().Name, reasoning),
		})
	}

	var result *decisionResult
	var attemptErr error

	for attempts := 0; attempts < maxAttempts; attempts++ {
		result, attemptErr = a.decision(ctx,
			cc,
			a.availableActions().ToTools(),
			openai.ToolChoice{
				Type:     openai.ToolTypeFunction,
				Function: openai.ToolFunction{Name: act.Definition().Name.String()},
			},
			maxAttempts,
		)
		if attemptErr == nil && result.actionParams != nil {
			return result, nil
		}
		xlog.Warn("Attempt to generate parameters failed", "attempt", attempts+1, "error", attemptErr)
	}

	return nil, fmt.Errorf("failed to generate parameters after %d attempts: %w", maxAttempts, attemptErr)
}

func (a *Agent) handlePlanning(ctx context.Context, job *types.Job, chosenAction types.Action, actionParams types.ActionParams, reasoning string, pickTemplate string, conv Messages) (Messages, error) {
	// Planning: run all the actions in sequence
	if !chosenAction.Definition().Name.Is(action.PlanActionName) {
		xlog.Debug("no plan action")
		return conv, nil
	}

	xlog.Debug("[planning]...")
	planResult := action.PlanResult{}
	if err := actionParams.Unmarshal(&planResult); err != nil {
		return conv, fmt.Errorf("error unmarshalling plan result: %w", err)
	}

	stateResult := types.ActionState{
		ActionCurrentState: types.ActionCurrentState{
			Job:       job,
			Action:    chosenAction,
			Params:    actionParams,
			Reasoning: reasoning,
		},
		ActionResult: types.ActionResult{
			Result: fmt.Sprintf("planning %s, subtasks: %+v", planResult.Goal, planResult.Subtasks),
		},
	}
	job.Result.SetResult(stateResult)
	job.CallbackWithResult(stateResult)

	xlog.Info("[Planning] starts", "agent", a.Character.Name, "goal", planResult.Goal)
	for _, s := range planResult.Subtasks {
		xlog.Info("[Planning] subtask", "agent", a.Character.Name, "action", s.Action, "reasoning", s.Reasoning)
	}

	if len(planResult.Subtasks) == 0 {
		return conv, fmt.Errorf("no subtasks")
	}

	// Execute all subtasks in sequence
	for _, subtask := range planResult.Subtasks {
		xlog.Info("[subtask] Generating parameters",
			"agent", a.Character.Name,
			"action", subtask.Action,
			"reasoning", reasoning,
		)

		subTaskAction := a.availableActions().Find(subtask.Action)
		subTaskReasoning := fmt.Sprintf("%s Overall goal is: %s", subtask.Reasoning, planResult.Goal)

		params, err := a.generateParameters(ctx, pickTemplate, subTaskAction, conv, subTaskReasoning, maxRetries)
		if err != nil {
			return conv, fmt.Errorf("error generating action's parameters: %w", err)
		}
		actionParams = params.actionParams

		if !job.Callback(types.ActionCurrentState{
			Job:       job,
			Action:    subTaskAction,
			Params:    actionParams,
			Reasoning: subTaskReasoning,
		}) {
			job.Result.SetResult(types.ActionState{
				ActionCurrentState: types.ActionCurrentState{
					Job:       job,
					Action:    chosenAction,
					Params:    actionParams,
					Reasoning: subTaskReasoning,
				},
				ActionResult: types.ActionResult{
					Result: "stopped by callback",
				},
			})
			job.Result.Conversation = conv
			job.Result.Finish(nil)
			break
		}

		result, err := a.runAction(ctx, subTaskAction, actionParams)
		if err != nil {
			return conv, fmt.Errorf("error running action: %w", err)
		}

		stateResult := types.ActionState{
			ActionCurrentState: types.ActionCurrentState{
				Job:       job,
				Action:    subTaskAction,
				Params:    actionParams,
				Reasoning: subTaskReasoning,
			},
			ActionResult: result,
		}
		job.Result.SetResult(stateResult)
		job.CallbackWithResult(stateResult)
		xlog.Debug("[subtask] Action executed", "agent", a.Character.Name, "action", subTaskAction.Definition().Name, "result", result)
		conv = a.addFunctionResultToConversation(subTaskAction, actionParams, result, conv)
	}

	return conv, nil
}

func (a *Agent) availableActions() types.Actions {
	// defaultActions := append(a.options.userActions, action.NewReply())

	addPlanAction := func(actions types.Actions) types.Actions {
		if !a.options.canPlan {
			return actions
		}
		plannablesActions := []string{}
		for _, a := range actions {
			if a.Plannable() {
				plannablesActions = append(plannablesActions, a.Definition().Name.String())
			}
		}
		planAction := action.NewPlan(plannablesActions)
		actions = append(actions, planAction)
		return actions
	}

	defaultActions := append(a.mcpActions, a.options.userActions...)

	if a.options.initiateConversations && a.selfEvaluationInProgress { // && self-evaluation..
		acts := append(defaultActions, action.NewConversation())
		if a.options.enableHUD {
			acts = append(acts, action.NewState())
		}
		//if a.options.canStopItself {
		//	acts = append(acts, action.NewStop())
		// }

		return addPlanAction(acts)
	}

	if a.options.canStopItself {
		acts := append(defaultActions, action.NewStop())
		if a.options.enableHUD {
			acts = append(acts, action.NewState())
		}
		return addPlanAction(acts)
	}

	if a.options.enableHUD {
		return addPlanAction(append(defaultActions, action.NewState()))
	}

	return addPlanAction(defaultActions)
}

func (a *Agent) prepareHUD() (promptHUD *PromptHUD) {
	if !a.options.enableHUD {
		return nil
	}

	return &PromptHUD{
		Character:     a.Character,
		CurrentState:  *a.currentState,
		PermanentGoal: a.options.permanentGoal,
		ShowCharacter: a.options.showCharacter,
	}
}

// pickAction picks an action based on the conversation
func (a *Agent) pickAction(ctx context.Context, templ string, messages []openai.ChatCompletionMessage, maxRetries int) (types.Action, types.ActionParams, string, error) {
	c := messages

	if !a.options.forceReasoning {
		// We could also avoid using functions here, get just a reply from the LLM,
		// and then use the reply to get the action
		thought, err := a.decision(ctx,
			messages,
			a.availableActions().ToTools(),
			nil,
			maxRetries)
		if err != nil {
			return nil, nil, "", err
		}

		xlog.Debug(fmt.Sprintf("thought action Name: %v", thought.actioName))
		xlog.Debug(fmt.Sprintf("thought message: %v", thought.message))

		// Find the action
		chosenAction := a.availableActions().Find(thought.actioName)
		if chosenAction == nil || thought.actioName == "" {
			xlog.Debug("no answer")

			// LLM replied with an answer?
			//fmt.Errorf("no action found for intent:" + thought.actioName)
			return nil, nil, thought.message, nil
		}
		xlog.Debug(fmt.Sprintf("chosenAction: %v", chosenAction.Definition().Name))
		return chosenAction, thought.actionParams, thought.message, nil
	}

	prompt, err := renderTemplate(templ, a.prepareHUD(), a.availableActions(), "")
	if err != nil {
		return nil, nil, "", err
	}
	// Get the LLM to think on what to do
	// and have a thought
	if !Messages(c).Exist(prompt) {
		c = append([]openai.ChatCompletionMessage{
			{
				Role:    "system",
				Content: prompt,
			},
		}, c...)
	}

	// We could also avoid using functions here, get just a reply from the LLM,
	// and then use the reply to get the action
	thought, err := a.decision(ctx,
		c,
		types.Actions{action.NewReasoning()}.ToTools(),
		action.NewReasoning().Definition().Name, maxRetries)
	if err != nil {
		return nil, nil, "", err
	}
	reason := ""
	response := &action.ReasoningResponse{}
	if thought.actionParams != nil {
		if err := thought.actionParams.Unmarshal(response); err != nil {
			return nil, nil, "", err
		}
		reason = response.Reasoning
	}
	if thought.message != "" {
		reason = thought.message
	}

	// From the thought, get the action call.
	// Get all the available action IDs
	actionsID := []string{}
	for _, m := range a.availableActions() {
		actionsID = append(actionsID, m.Definition().Name.String())
	}
	intentionsTools := action.NewIntention(actionsID...)

	//XXX: Why do we add the reason here?
	params, err := a.decision(ctx,
		append(c, openai.ChatCompletionMessage{
			Role:    "system",
			Content: "Given the assistant thought, pick the relevant action: " + reason,
		}),
		types.Actions{intentionsTools}.ToTools(),
		intentionsTools.Definition().Name, maxRetries)
	if err != nil {
		return nil, nil, "", fmt.Errorf("failed to get the action tool parameters: %v", err)
	}

	actionChoice := action.IntentResponse{}

	if params.actionParams == nil {
		return nil, nil, params.message, nil
	}

	err = params.actionParams.Unmarshal(&actionChoice)
	if err != nil {
		return nil, nil, "", err
	}

	if actionChoice.Tool == "" || actionChoice.Tool == "none" {
		return nil, nil, "", fmt.Errorf("no intent detected")
	}

	// Find the action
	chosenAction := a.availableActions().Find(actionChoice.Tool)
	if chosenAction == nil {
		return nil, nil, "", fmt.Errorf("no action found for intent:" + actionChoice.Tool)
	}

	return chosenAction, nil, actionChoice.Reasoning, nil
}
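The Messages helpers above are plain slice utilities over go-openai chat messages. This hedged, standalone sketch re-implements the latest-user-message scan to show the intended backwards traversal; only the go-openai role constants are assumed beyond what the diff shows.

package main

import (
	"fmt"

	"github.com/sashabaranov/go-openai"
)

// Thin stand-in for agent.Messages, mirroring GetLatestUserMessage above.
type Messages []openai.ChatCompletionMessage

// GetLatestUserMessage walks the conversation backwards and returns the
// most recent user message, or nil if there is none.
func (m Messages) GetLatestUserMessage() *openai.ChatCompletionMessage {
	for i := len(m) - 1; i >= 0; i-- {
		if m[i].Role == openai.ChatMessageRoleUser {
			return &m[i]
		}
	}
	return nil
}

func main() {
	conv := Messages{
		{Role: openai.ChatMessageRoleSystem, Content: "You are an agent."},
		{Role: openai.ChatMessageRoleUser, Content: "hello"},
		{Role: openai.ChatMessageRoleAssistant, Content: "hi!"},
		{Role: openai.ChatMessageRoleUser, Content: "what's next?"},
	}
	if msg := conv.GetLatestUserMessage(); msg != nil {
		fmt.Println(msg.Content) // what's next?
	}
}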
961 core/agent/agent.go Normal file
@@ -0,0 +1,961 @@
package agent

import (
	"context"
	"fmt"
	"os"
	"sync"
	"time"

	"github.com/mudler/LocalAGI/pkg/xlog"

	"github.com/mudler/LocalAGI/core/action"
	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/pkg/llm"
	"github.com/sashabaranov/go-openai"
)

const (
	UserRole      = "user"
	AssistantRole = "assistant"
	SystemRole    = "system"
	maxRetries    = 5
)

type Agent struct {
	sync.Mutex
	options   *options
	Character Character
	client    *openai.Client
	jobQueue  chan *types.Job
	context   *types.ActionContext

	currentState *action.AgentInternalState

	selfEvaluationInProgress bool
	pause                    bool

	newConversations chan openai.ChatCompletionMessage

	mcpActions types.Actions

	subscriberMutex        sync.Mutex
	newMessagesSubscribers []func(openai.ChatCompletionMessage)
}

type RAGDB interface {
	Store(s string) error
	Reset() error
	Search(s string, similarEntries int) ([]string, error)
	Count() int
}

func New(opts ...Option) (*Agent, error) {
	options, err := newOptions(opts...)
	if err != nil {
		return nil, fmt.Errorf("failed to set options: %v", err)
	}

	client := llm.NewClient(options.LLMAPI.APIKey, options.LLMAPI.APIURL, options.timeout)

	c := context.Background()
	if options.context != nil {
		c = options.context
	}

	ctx, cancel := context.WithCancel(c)
	a := &Agent{
		jobQueue:               make(chan *types.Job),
		options:                options,
		client:                 client,
		Character:              options.character,
		currentState:           &action.AgentInternalState{},
		context:                types.NewActionContext(ctx, cancel),
		newConversations:       make(chan openai.ChatCompletionMessage),
		newMessagesSubscribers: options.newConversationsSubscribers,
	}

	if a.options.statefile != "" {
		if _, err := os.Stat(a.options.statefile); err == nil {
			if err = a.LoadState(a.options.statefile); err != nil {
				return a, fmt.Errorf("failed to load state: %v", err)
			}
		}
	}

	// var programLevel = new(xlog.LevelVar) // Info by default
	// h := xlog.NewTextHandler(os.Stdout, &xlog.HandlerOptions{Level: programLevel})
	// xlog = xlog.New(h)
	//programLevel.Set(a.options.logLevel)

	if err := a.prepareIdentity(); err != nil {
		return nil, fmt.Errorf("failed to prepare identity: %v", err)
	}

	xlog.Info("Populating actions from MCP Servers (if any)")
	a.initMCPActions()
	xlog.Info("Done populating actions from MCP Servers")

	xlog.Info(
		"Agent created",
		"agent", a.Character.Name,
		"character", a.Character.String(),
		"state", a.State().String(),
		"goal", a.options.permanentGoal,
		"model", a.options.LLMAPI.Model,
	)

	return a, nil
}

func (a *Agent) startNewConversationsConsumer() {
	go func() {
		for {
			select {
			case <-a.context.Done():
				return

			case msg := <-a.newConversations:
				xlog.Debug("New conversation", "agent", a.Character.Name, "message", msg.Content)
				a.subscriberMutex.Lock()
				subs := a.newMessagesSubscribers
				a.subscriberMutex.Unlock()
				for _, s := range subs {
					s(msg)
				}
			}
		}
	}()
}

func (a *Agent) AddSubscriber(f func(openai.ChatCompletionMessage)) {
	a.subscriberMutex.Lock()
	defer a.subscriberMutex.Unlock()
	a.newMessagesSubscribers = append(a.newMessagesSubscribers, f)
}

func (a *Agent) Context() context.Context {
	return a.context.Context
}

// Ask is a blocking call that returns the response as soon as it's ready.
// It discards any other computation.
func (a *Agent) Ask(opts ...types.JobOption) *types.JobResult {
	xlog.Debug("Agent Ask()", "agent", a.Character.Name, "model", a.options.LLMAPI.Model)
	defer func() {
		xlog.Debug("Agent has finished being asked", "agent", a.Character.Name)
	}()

	return a.Execute(types.NewJob(
		append(
			opts,
			types.WithReasoningCallback(a.options.reasoningCallback),
			types.WithResultCallback(a.options.resultCallback),
		)...,
	))
}

// Execute is a pre-emptive, blocking call that returns the response as soon as it's ready.
// It discards any other computation.
func (a *Agent) Execute(j *types.Job) *types.JobResult {
	xlog.Debug("Agent Execute()", "agent", a.Character.Name, "model", a.options.LLMAPI.Model)
	defer func() {
		xlog.Debug("Agent has finished", "agent", a.Character.Name)
	}()

	a.Enqueue(j)
	return j.Result.WaitResult()
}

func (a *Agent) Enqueue(j *types.Job) {
	j.ReasoningCallback = a.options.reasoningCallback
	j.ResultCallback = a.options.resultCallback

	a.jobQueue <- j
}

func (a *Agent) askLLM(ctx context.Context, conversation []openai.ChatCompletionMessage, maxRetries int) (openai.ChatCompletionMessage, error) {
	var resp openai.ChatCompletionResponse
	var err error

	for attempt := 0; attempt <= maxRetries; attempt++ {
		resp, err = a.client.CreateChatCompletion(ctx,
			openai.ChatCompletionRequest{
				Model:    a.options.LLMAPI.Model,
				Messages: conversation,
			},
		)
		if err == nil && len(resp.Choices) == 1 && resp.Choices[0].Message.Content != "" {
			break
		}
		xlog.Warn("Error asking LLM, retrying", "attempt", attempt+1, "error", err)
		if attempt < maxRetries {
			time.Sleep(2 * time.Second) // Optional: Add a delay between retries
		}
	}

	if err != nil {
		return openai.ChatCompletionMessage{}, err
	}

	if len(resp.Choices) != 1 {
		return openai.ChatCompletionMessage{}, fmt.Errorf("not enough choices: %w", err)
	}

	return resp.Choices[0].Message, nil
}

var ErrContextCanceled = fmt.Errorf("context canceled")

func (a *Agent) Stop() {
	a.Lock()
	defer a.Unlock()
	xlog.Debug("Stopping agent", "agent", a.Character.Name)
	a.context.Cancel()
}

func (a *Agent) Pause() {
	a.Lock()
	defer a.Unlock()
	a.pause = true
}

func (a *Agent) Resume() {
	a.Lock()
	defer a.Unlock()
	a.pause = false
}

func (a *Agent) Paused() bool {
	a.Lock()
	defer a.Unlock()
	return a.pause
}

func (a *Agent) Memory() RAGDB {
	return a.options.ragdb
}

func (a *Agent) runAction(ctx context.Context, chosenAction types.Action, params types.ActionParams) (result types.ActionResult, err error) {
	for _, act := range a.availableActions() {
		if act.Definition().Name == chosenAction.Definition().Name {
			res, err := act.Run(ctx, params)
			if err != nil {
				return types.ActionResult{}, fmt.Errorf("error running action: %w", err)
			}

			result = res
		}
	}

	xlog.Info("Running action", "action", chosenAction.Definition().Name, "agent", a.Character.Name)

	if chosenAction.Definition().Name.Is(action.StateActionName) {
		// We need to store the result in the state
		state := action.AgentInternalState{}

		err = params.Unmarshal(&state)
		if err != nil {
			return types.ActionResult{}, fmt.Errorf("error unmarshalling state of the agent: %w", err)
		}
		// update the current state with the one we just got from the action
		a.currentState = &state

		// update the state file
		if a.options.statefile != "" {
			if err := a.SaveState(a.options.statefile); err != nil {
				return types.ActionResult{}, err
			}
		}
	}

	return result, nil
}

func (a *Agent) processPrompts(conversation Messages) Messages {
	//if job.Image != "" {
	// TODO: Use llava to explain the image content
	//}
	// Add custom prompts
	for _, prompt := range a.options.prompts {
		message, err := prompt.Render(a)
		if err != nil {
			xlog.Error("Error rendering prompt", "error", err)
			continue
		}
		if message == "" {
			xlog.Debug("Prompt is empty, skipping", "agent", a.Character.Name)
			continue
		}
		if !conversation.Exist(a.options.systemPrompt) {
			conversation = append([]openai.ChatCompletionMessage{
				{
					Role:    prompt.Role(),
					Content: message,
				}}, conversation...)
		}
	}

	// TODO: move to a Promptblock?
	if a.options.systemPrompt != "" {
		if !conversation.Exist(a.options.systemPrompt) {
			conversation = append([]openai.ChatCompletionMessage{
				{
					Role:    "system",
					Content: a.options.systemPrompt,
				}}, conversation...)
		}
	}

	return conversation
}

func (a *Agent) describeImage(ctx context.Context, model, imageURL string) (string, error) {
	xlog.Debug("Describing image", "model", model, "image", imageURL)
	resp, err := a.client.CreateChatCompletion(ctx,
		openai.ChatCompletionRequest{
			Model: model,
			Messages: []openai.ChatCompletionMessage{
				{
					Role: "user",
					MultiContent: []openai.ChatMessagePart{
						{
							Type: openai.ChatMessagePartTypeText,
							Text: "What is in the image?",
						},
						{
							Type: openai.ChatMessagePartTypeImageURL,
							ImageURL: &openai.ChatMessageImageURL{
								URL: imageURL,
							},
						},
					},
				},
			}})
	if err != nil {
		return "", err
	}
	if len(resp.Choices) == 0 {
		return "", fmt.Errorf("no choices")
	}

	xlog.Debug("Described image", "description", resp.Choices[0].Message.Content)
	return resp.Choices[0].Message.Content, nil
}

func extractImageContent(message openai.ChatCompletionMessage) (imageURL, text string, e error) {
	e = fmt.Errorf("no image found")
	if message.MultiContent != nil {
		for _, content := range message.MultiContent {
			if content.Type == openai.ChatMessagePartTypeImageURL {
				imageURL = content.ImageURL.URL
				e = nil
			}
			if content.Type == openai.ChatMessagePartTypeText {
				text = content.Text
				e = nil
			}
		}
	}
	return
}

func (a *Agent) processUserInputs(job *types.Job, role string, conv Messages) Messages {

	// Walk the conversation history and check if the last message from the user contains an image.
	// If it does, we need to describe the image first with a model that supports image understanding
	// (if the current model doesn't support it) and add it to the conversation context
	if !a.options.SeparatedMultimodalModel() {
		return conv
	}
	lastUserMessage := conv.GetLatestUserMessage()
	if lastUserMessage != nil && conv.IsLastMessageFromRole(UserRole) {
		imageURL, text, err := extractImageContent(*lastUserMessage)
		if err == nil {
			// We have an image, we need to describe it first
			// and add it to the conversation context
			imageDescription, err := a.describeImage(a.context.Context, a.options.LLMAPI.MultimodalModel, imageURL)
			if err != nil {
				xlog.Error("Error describing image", "error", err)
			} else {
				// We replace the user message with the image description
				// and add the user text to the conversation
				explainerMessage := openai.ChatCompletionMessage{
					Role:    "system",
					Content: fmt.Sprintf("The user shared an image which can be described as: %s", imageDescription),
				}

				// remove lastUserMessage from the conversation
				conv = conv.RemoveLastUserMessage()
				conv = append(conv, explainerMessage)
				conv = append(conv, openai.ChatCompletionMessage{
					Role:    role,
					Content: text,
				})
			}
		}
	}

	return conv
}

func (a *Agent) consumeJob(job *types.Job, role string) {

	if err := job.GetContext().Err(); err != nil {
		job.Result.Finish(fmt.Errorf("expired"))
		return
	}

	a.Lock()
	paused := a.pause
	a.Unlock()

	if paused {
		xlog.Info("Agent is paused, skipping job", "agent", a.Character.Name)
		job.Result.Finish(fmt.Errorf("agent is paused"))
		return
	}

	// We are self evaluating if we consume the job as a system role
	selfEvaluation := role == SystemRole

	conv := job.ConversationHistory

	a.Lock()
	a.selfEvaluationInProgress = selfEvaluation
	a.Unlock()
	defer job.Cancel()

	if selfEvaluation {
		defer func() {
			a.Lock()
			a.selfEvaluationInProgress = false
			a.Unlock()
		}()
	}

	conv = a.processPrompts(conv)
	conv = a.processUserInputs(job, role, conv)

	// RAG
	a.knowledgeBaseLookup(conv)

	var pickTemplate string
	var reEvaluationTemplate string

	if selfEvaluation {
		pickTemplate = pickSelfTemplate
		reEvaluationTemplate = reSelfEvalTemplate
	} else {
		pickTemplate = pickActionTemplate
		reEvaluationTemplate = reEvalTemplate
	}

	// choose an action first
	var chosenAction types.Action
	var reasoning string
	var actionParams types.ActionParams

	if job.HasNextAction() {
		// if we are being re-evaluated, we already have the action
		// and the reasoning. Consume it here and reset it
		action, params, reason := job.GetNextAction()
		chosenAction = *action
		reasoning = reason
		if params == nil {
			p, err := a.generateParameters(job.GetContext(), pickTemplate, chosenAction, conv, reasoning, maxRetries)
			if err != nil {
				xlog.Error("Error generating parameters, trying again", "error", err)
				// try again
				job.SetNextAction(&chosenAction, nil, reasoning)
				a.consumeJob(job, role)
				return
			}
			actionParams = p.actionParams
		} else {
			actionParams = *params
		}
		job.ResetNextAction()
	} else {
		var err error
		chosenAction, actionParams, reasoning, err = a.pickAction(job.GetContext(), pickTemplate, conv, maxRetries)
		if err != nil {
			xlog.Error("Error picking action", "error", err)
			job.Result.Finish(err)
			return
		}
	}

	// check if the agent is looping over the same action
	// if so, we need to stop it
	if a.options.loopDetectionSteps > 0 && len(job.GetPastActions()) > 0 {
		count := map[string]int{}
		for i := len(job.GetPastActions()) - 1; i >= 0; i-- {
			pastAction := job.GetPastActions()[i]
			if pastAction.Action.Definition().Name == chosenAction.Definition().Name &&
				pastAction.Params.String() == actionParams.String() {
				count[chosenAction.Definition().Name.String()]++
			}
		}
		if count[chosenAction.Definition().Name.String()] > a.options.loopDetectionSteps {
			xlog.Info("Loop detected, stopping agent", "agent", a.Character.Name, "action", chosenAction.Definition().Name)
			a.reply(job, role, conv, actionParams, chosenAction, reasoning)
			return
		}
	}

	//xlog.Debug("Picked action", "agent", a.Character.Name, "action", chosenAction.Definition().Name, "reasoning", reasoning)
	if chosenAction == nil {
		// If no action was picked up, the reasoning is the message returned by the assistant
		// so we can consume it as if it was a reply.
		//job.Result.SetResult(ActionState{ActionCurrentState{nil, nil, "No action to do, just reply"}, ""})
		//job.Result.Finish(fmt.Errorf("no action to do"))\
		xlog.Info("No action to do, just reply", "agent", a.Character.Name, "reasoning", reasoning)

		conv = append(conv, openai.ChatCompletionMessage{
			Role:    "assistant",
			Content: reasoning,
		})

		xlog.Debug("Finish job with reasoning", "reasoning", reasoning, "agent", a.Character.Name, "conversation", fmt.Sprintf("%+v", conv))
		job.Result.Conversation = conv
		job.Result.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
			a.saveCurrentConversation(conv)
		})
		job.Result.SetResponse(reasoning)
		job.Result.Finish(nil)
		return
	}

	if chosenAction.Definition().Name.Is(action.StopActionName) {
		xlog.Info("LLM decided to stop")
		job.Result.Finish(nil)
		return
	}

	// if we force a reasoning, we need to generate the parameters
	if a.options.forceReasoning || actionParams == nil {
		xlog.Info("Generating parameters",
			"agent", a.Character.Name,
			"action", chosenAction.Definition().Name,
			"reasoning", reasoning,
		)

		params, err := a.generateParameters(job.GetContext(), pickTemplate, chosenAction, conv, reasoning, maxRetries)
		if err != nil {
			xlog.Error("Error generating parameters, trying again", "error", err)
			// try again
			job.SetNextAction(&chosenAction, nil, reasoning)
			a.consumeJob(job, role)
			return
		}
		actionParams = params.actionParams
	}

	xlog.Info(
		"Generated parameters",
		"agent", a.Character.Name,
		"action", chosenAction.Definition().Name,
		"reasoning", reasoning,
		"params", actionParams.String(),
	)

	if actionParams == nil {
		job.Result.Finish(fmt.Errorf("no parameters"))
		xlog.Error("No parameters", "agent", a.Character.Name)
		return
	}

	job.AddPastAction(chosenAction, &actionParams)

	if !job.Callback(types.ActionCurrentState{
		Job:       job,
		Action:    chosenAction,
		Params:    actionParams,
		Reasoning: reasoning}) {
		job.Result.SetResult(types.ActionState{
			ActionCurrentState: types.ActionCurrentState{
				Job:       job,
				Action:    chosenAction,
				Params:    actionParams,
				Reasoning: reasoning,
			},
			ActionResult: types.ActionResult{Result: "stopped by callback"}})
		job.Result.Conversation = conv
		job.Result.Finish(nil)
		return
	}

	var err error
	conv, err = a.handlePlanning(job.GetContext(), job, chosenAction, actionParams, reasoning, pickTemplate, conv)
	if err != nil {
		job.Result.Finish(fmt.Errorf("error running action: %w", err))
		return
	}

	if selfEvaluation && a.options.initiateConversations &&
		chosenAction.Definition().Name.Is(action.ConversationActionName) {

		xlog.Info("LLM decided to initiate a new conversation", "agent", a.Character.Name)

		message := action.ConversationActionResponse{}
		if err := actionParams.Unmarshal(&message); err != nil {
			xlog.Error("Error unmarshalling conversation response", "error", err)
			job.Result.Finish(fmt.Errorf("error unmarshalling conversation response: %w", err))
			return
		}

		msg := openai.ChatCompletionMessage{
			Role:    "assistant",
			Content: message.Message,
		}

		go func(agent *Agent) {
			xlog.Info("Sending new conversation to channel", "agent", agent.Character.Name, "message", msg.Content)
			agent.newConversations <- msg
		}(a)

		job.Result.Conversation = []openai.ChatCompletionMessage{
|
||||||
|
msg,
|
||||||
|
}
|
||||||
|
job.Result.SetResponse("decided to initiate a new conversation")
|
||||||
|
job.Result.Finish(nil)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// if we have a reply action, we need to run it
|
||||||
|
if chosenAction.Definition().Name.Is(action.ReplyActionName) {
|
||||||
|
a.reply(job, role, conv, actionParams, chosenAction, reasoning)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if !chosenAction.Definition().Name.Is(action.PlanActionName) {
|
||||||
|
result, err := a.runAction(job.GetContext(), chosenAction, actionParams)
|
||||||
|
if err != nil {
|
||||||
|
//job.Result.Finish(fmt.Errorf("error running action: %w", err))
|
||||||
|
//return
|
||||||
|
// make the LLM aware of the error of running the action instead of stopping the job here
|
||||||
|
result.Result = fmt.Sprintf("Error running tool: %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
stateResult := types.ActionState{
|
||||||
|
ActionCurrentState: types.ActionCurrentState{
|
||||||
|
Job: job,
|
||||||
|
Action: chosenAction,
|
||||||
|
Params: actionParams,
|
||||||
|
Reasoning: reasoning,
|
||||||
|
},
|
||||||
|
ActionResult: result,
|
||||||
|
}
|
||||||
|
job.Result.SetResult(stateResult)
|
||||||
|
job.CallbackWithResult(stateResult)
|
||||||
|
xlog.Debug("Action executed", "agent", a.Character.Name, "action", chosenAction.Definition().Name, "result", result)
|
||||||
|
|
||||||
|
conv = a.addFunctionResultToConversation(chosenAction, actionParams, result, conv)
|
||||||
|
}
|
||||||
|
|
||||||
|
//conv = append(conv, messages...)
|
||||||
|
//conv = messages
|
||||||
|
|
||||||
|
// given the result, we can now ask OpenAI to complete the conversation or
|
||||||
|
// to continue using another tool given the result
|
||||||
|
followingAction, followingParams, reasoning, err := a.pickAction(job.GetContext(), reEvaluationTemplate, conv, maxRetries)
|
||||||
|
if err != nil {
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.Finish(fmt.Errorf("error picking action: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if followingAction != nil &&
|
||||||
|
!followingAction.Definition().Name.Is(action.ReplyActionName) &&
|
||||||
|
!chosenAction.Definition().Name.Is(action.ReplyActionName) {
|
||||||
|
|
||||||
|
xlog.Info("Following action", "action", followingAction.Definition().Name, "agent", a.Character.Name)
|
||||||
|
|
||||||
|
// We need to do another action (?)
|
||||||
|
// The agent decided to do another action
|
||||||
|
// call ourselves again
|
||||||
|
job.SetNextAction(&followingAction, &followingParams, reasoning)
|
||||||
|
a.consumeJob(job, role)
|
||||||
|
return
|
||||||
|
} else if followingAction == nil {
|
||||||
|
xlog.Info("Not following another action", "agent", a.Character.Name)
|
||||||
|
|
||||||
|
if !a.options.forceReasoning {
|
||||||
|
xlog.Info("Finish conversation with reasoning", "reasoning", reasoning, "agent", a.Character.Name)
|
||||||
|
|
||||||
|
msg := openai.ChatCompletionMessage{
|
||||||
|
Role: "assistant",
|
||||||
|
Content: reasoning,
|
||||||
|
}
|
||||||
|
|
||||||
|
conv = append(conv, msg)
|
||||||
|
job.Result.SetResponse(msg.Content)
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
|
||||||
|
a.saveCurrentConversation(conv)
|
||||||
|
})
|
||||||
|
job.Result.Finish(nil)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
a.reply(job, role, conv, actionParams, chosenAction, reasoning)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agent) reply(job *types.Job, role string, conv Messages, actionParams types.ActionParams, chosenAction types.Action, reasoning string) {
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
|
||||||
|
// At this point can only be a reply action
|
||||||
|
xlog.Info("Computing reply", "agent", a.Character.Name)
|
||||||
|
|
||||||
|
// decode the response
|
||||||
|
replyResponse := action.ReplyResponse{}
|
||||||
|
|
||||||
|
if err := actionParams.Unmarshal(&replyResponse); err != nil {
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.Finish(fmt.Errorf("error unmarshalling reply response: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we have already a reply from the action, just return it.
|
||||||
|
// Otherwise generate a full conversation to get a proper message response
|
||||||
|
// if chosenAction.Definition().Name.Is(action.ReplyActionName) {
|
||||||
|
// replyResponse := action.ReplyResponse{}
|
||||||
|
// if err := params.actionParams.Unmarshal(&replyResponse); err != nil {
|
||||||
|
// job.Result.Finish(fmt.Errorf("error unmarshalling reply response: %w", err))
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
// if replyResponse.Message != "" {
|
||||||
|
// job.Result.SetResponse(replyResponse.Message)
|
||||||
|
// job.Result.Finish(nil)
|
||||||
|
// return
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
|
||||||
|
// If we have a hud, display it when answering normally
|
||||||
|
if a.options.enableHUD {
|
||||||
|
prompt, err := renderTemplate(hudTemplate, a.prepareHUD(), a.availableActions(), reasoning)
|
||||||
|
if err != nil {
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.Finish(fmt.Errorf("error renderTemplate: %w", err))
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !Messages(conv).Exist(prompt) {
|
||||||
|
conv = append([]openai.ChatCompletionMessage{
|
||||||
|
{
|
||||||
|
Role: "system",
|
||||||
|
Content: prompt,
|
||||||
|
},
|
||||||
|
}, conv...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate a human-readable response
|
||||||
|
// resp, err := a.client.CreateChatCompletion(ctx,
|
||||||
|
// openai.ChatCompletionRequest{
|
||||||
|
// Model: a.options.LLMAPI.Model,
|
||||||
|
// Messages: append(conv,
|
||||||
|
// openai.ChatCompletionMessage{
|
||||||
|
// Role: "system",
|
||||||
|
// Content: "Assistant thought: " + replyResponse.Message,
|
||||||
|
// },
|
||||||
|
// ),
|
||||||
|
// },
|
||||||
|
// )
|
||||||
|
|
||||||
|
if replyResponse.Message != "" {
|
||||||
|
xlog.Info("Return reply message", "reply", replyResponse.Message, "agent", a.Character.Name)
|
||||||
|
|
||||||
|
msg := openai.ChatCompletionMessage{
|
||||||
|
Role: "assistant",
|
||||||
|
Content: replyResponse.Message,
|
||||||
|
}
|
||||||
|
|
||||||
|
conv = append(conv, msg)
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.SetResponse(msg.Content)
|
||||||
|
job.Result.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
|
||||||
|
a.saveCurrentConversation(conv)
|
||||||
|
})
|
||||||
|
job.Result.Finish(nil)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
xlog.Info("Reasoning, ask LLM for a reply", "agent", a.Character.Name)
|
||||||
|
xlog.Debug("Conversation", "conversation", fmt.Sprintf("%+v", conv))
|
||||||
|
msg, err := a.askLLM(job.GetContext(), conv, maxRetries)
|
||||||
|
if err != nil {
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.Finish(err)
|
||||||
|
xlog.Error("Error asking LLM for a reply", "error", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// If we didn't got any message, we can use the response from the action
|
||||||
|
if chosenAction.Definition().Name.Is(action.ReplyActionName) && msg.Content == "" {
|
||||||
|
xlog.Info("No output returned from conversation, using the action response as a reply " + replyResponse.Message)
|
||||||
|
|
||||||
|
msg = openai.ChatCompletionMessage{
|
||||||
|
Role: "assistant",
|
||||||
|
Content: replyResponse.Message,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
conv = append(conv, msg)
|
||||||
|
job.Result.SetResponse(msg.Content)
|
||||||
|
xlog.Info("Response from LLM", "response", msg.Content, "agent", a.Character.Name)
|
||||||
|
job.Result.Conversation = conv
|
||||||
|
job.Result.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
|
||||||
|
a.saveCurrentConversation(conv)
|
||||||
|
})
|
||||||
|
job.Result.Finish(nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agent) addFunctionResultToConversation(chosenAction types.Action, actionParams types.ActionParams, result types.ActionResult, conv Messages) Messages {
|
||||||
|
// calling the function
|
||||||
|
conv = append(conv, openai.ChatCompletionMessage{
|
||||||
|
Role: "assistant",
|
||||||
|
ToolCalls: []openai.ToolCall{
|
||||||
|
{
|
||||||
|
Type: openai.ToolTypeFunction,
|
||||||
|
Function: openai.FunctionCall{
|
||||||
|
Name: chosenAction.Definition().Name.String(),
|
||||||
|
Arguments: actionParams.String(),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
// result of calling the function
|
||||||
|
conv = append(conv, openai.ChatCompletionMessage{
|
||||||
|
Role: openai.ChatMessageRoleTool,
|
||||||
|
Content: result.Result,
|
||||||
|
Name: chosenAction.Definition().Name.String(),
|
||||||
|
ToolCallID: chosenAction.Definition().Name.String(),
|
||||||
|
})
|
||||||
|
|
||||||
|
return conv
|
||||||
|
}
|
||||||
|
|
||||||
|
// This is running in the background.
|
||||||
|
func (a *Agent) periodicallyRun(timer *time.Timer) {
|
||||||
|
// Remember always to reset the timer - if we don't the agent will stop..
|
||||||
|
defer timer.Reset(a.options.periodicRuns)
|
||||||
|
|
||||||
|
xlog.Debug("Agent is running periodically", "agent", a.Character.Name)
|
||||||
|
|
||||||
|
// TODO: Would be nice if we have a special action to
|
||||||
|
// contact the user. This would actually make sure that
|
||||||
|
// if the agent wants to initiate a conversation, it can do so.
|
||||||
|
// This would be a special action that would be picked up by the agent
|
||||||
|
// and would be used to contact the user.
|
||||||
|
|
||||||
|
// if len(conv()) != 0 {
|
||||||
|
// // Here the LLM could decide to store some part of the conversation too in the memory
|
||||||
|
// evaluateMemory := NewJob(
|
||||||
|
// WithText(
|
||||||
|
// `Evaluate the current conversation and decide if we need to store some relevant informations from it`,
|
||||||
|
// ),
|
||||||
|
// WithReasoningCallback(a.options.reasoningCallback),
|
||||||
|
// WithResultCallback(a.options.resultCallback),
|
||||||
|
// )
|
||||||
|
// a.consumeJob(evaluateMemory, SystemRole)
|
||||||
|
|
||||||
|
// a.ResetConversation()
|
||||||
|
// }
|
||||||
|
|
||||||
|
if !a.options.standaloneJob {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
xlog.Info("Periodically running", "agent", a.Character.Name)
|
||||||
|
|
||||||
|
// Here we go in a loop of
|
||||||
|
// - asking the agent to do something
|
||||||
|
// - evaluating the result
|
||||||
|
// - asking the agent to do something else based on the result
|
||||||
|
|
||||||
|
// whatNext := NewJob(WithText("Decide what to do based on the state"))
|
||||||
|
whatNext := types.NewJob(
|
||||||
|
types.WithText(innerMonologueTemplate),
|
||||||
|
types.WithReasoningCallback(a.options.reasoningCallback),
|
||||||
|
types.WithResultCallback(a.options.resultCallback),
|
||||||
|
)
|
||||||
|
a.consumeJob(whatNext, SystemRole)
|
||||||
|
|
||||||
|
xlog.Info("STOP -- Periodically run is done", "agent", a.Character.Name)
|
||||||
|
|
||||||
|
// Save results from state
|
||||||
|
|
||||||
|
// a.ResetConversation()
|
||||||
|
|
||||||
|
// doWork := NewJob(WithText("Select the tool to use based on your goal and the current state."))
|
||||||
|
// a.consumeJob(doWork, SystemRole)
|
||||||
|
|
||||||
|
// results := []string{}
|
||||||
|
// for _, v := range doWork.Result.State {
|
||||||
|
// results = append(results, v.Result)
|
||||||
|
// }
|
||||||
|
|
||||||
|
// a.ResetConversation()
|
||||||
|
|
||||||
|
// // Here the LLM could decide to do something based on the result of our automatic action
|
||||||
|
// evaluateAction := NewJob(
|
||||||
|
// WithText(
|
||||||
|
// `Evaluate the current situation and decide if we need to execute other tools (for instance to store results into permanent, or short memory).
|
||||||
|
// We have done the following actions:
|
||||||
|
// ` + strings.Join(results, "\n"),
|
||||||
|
// ))
|
||||||
|
// a.consumeJob(evaluateAction, SystemRole)
|
||||||
|
|
||||||
|
// a.ResetConversation()
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agent) Run() error {
|
||||||
|
|
||||||
|
a.startNewConversationsConsumer()
|
||||||
|
xlog.Debug("Agent is now running", "agent", a.Character.Name)
|
||||||
|
// The agent run does two things:
|
||||||
|
// picks up requests from a queue
|
||||||
|
// and generates a response/perform actions
|
||||||
|
|
||||||
|
// It is also preemptive.
|
||||||
|
// That is, it can interrupt the current action
|
||||||
|
// if another one comes in.
|
||||||
|
|
||||||
|
// If there is no action, periodically evaluate if it has to do something on its own.
|
||||||
|
|
||||||
|
// Expose a REST API to interact with the agent to ask it things
|
||||||
|
|
||||||
|
//todoTimer := time.NewTicker(a.options.periodicRuns)
|
||||||
|
timer := time.NewTimer(a.options.periodicRuns)
|
||||||
|
for {
|
||||||
|
xlog.Debug("Agent is now waiting for a new job", "agent", a.Character.Name)
|
||||||
|
select {
|
||||||
|
case job := <-a.jobQueue:
|
||||||
|
a.loop(timer, job)
|
||||||
|
case <-a.context.Done():
|
||||||
|
// Agent has been canceled, return error
|
||||||
|
xlog.Warn("Agent has been canceled", "agent", a.Character.Name)
|
||||||
|
return ErrContextCanceled
|
||||||
|
case <-timer.C:
|
||||||
|
a.periodicallyRun(timer)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (a *Agent) loop(timer *time.Timer, job *types.Job) {
|
||||||
|
// Remember always to reset the timer - if we don't the agent will stop..
|
||||||
|
defer timer.Reset(a.options.periodicRuns)
|
||||||
|
// Consume the job and generate a response
|
||||||
|
// TODO: Give a short-term memory to the agent
|
||||||
|
// stop and drain the timer
|
||||||
|
if !timer.Stop() {
|
||||||
|
<-timer.C
|
||||||
|
}
|
||||||
|
xlog.Debug("Agent is consuming a job", "agent", a.Character.Name, "job", job)
|
||||||
|
a.consumeJob(job, UserRole)
|
||||||
|
}
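A note on the timer handling in `Run` and `loop` above: before consuming a job, `loop` stops the periodic timer and drains its channel so a tick that already fired cannot be picked up by the next `select` iteration, and the deferred `Reset` re-arms the timer once the job is done. A minimal, self-contained sketch of the same stop/drain/reset idiom (all names here are illustrative, not part of this diff):

package main

import (
	"fmt"
	"time"
)

func main() {
	timer := time.NewTimer(50 * time.Millisecond)
	work := make(chan string, 1)
	work <- "job"

	select {
	case j := <-work:
		// Stop the timer; if it already fired, drain the channel so a
		// stale tick is not consumed on the next iteration.
		if !timer.Stop() {
			<-timer.C
		}
		fmt.Println("consumed", j)
		timer.Reset(50 * time.Millisecond) // re-arm the periodic path
	case <-timer.C:
		fmt.Println("periodic run")
	}
}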
27
core/agent/agent_suite_test.go
Normal file
@@ -0,0 +1,27 @@
package agent_test

import (
	"os"
	"testing"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestAgent(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Agent test suite")
}

var testModel = os.Getenv("LOCALAGI_MODEL")
var apiURL = os.Getenv("LOCALAI_API_URL")
var apiKeyURL = os.Getenv("LOCALAI_API_KEY")

func init() {
	if testModel == "" {
		testModel = "hermes-2-pro-mistral"
	}
	if apiURL == "" {
		apiURL = "http://192.168.68.113:8080"
	}
}
346
core/agent/agent_test.go
Normal file
@@ -0,0 +1,346 @@
package agent_test

import (
	"context"
	"fmt"
	"net/http"
	"strings"
	"sync"

	"github.com/mudler/LocalAGI/pkg/xlog"
	"github.com/mudler/LocalAGI/services/actions"

	. "github.com/mudler/LocalAGI/core/agent"
	"github.com/mudler/LocalAGI/core/types"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
	"github.com/sashabaranov/go-openai"
	"github.com/sashabaranov/go-openai/jsonschema"
)

const testActionResult = "In Boston it's 30C today, it's sunny, and humidity is at 98%"
const testActionResult2 = "In milan it's very hot today, it is 45C and the humidity is at 200%"
const testActionResult3 = "In paris it's very cold today, it is 2C and the humidity is at 10%"

var _ types.Action = &TestAction{}

var debugOptions = []types.JobOption{
	types.WithReasoningCallback(func(state types.ActionCurrentState) bool {
		xlog.Info("Reasoning", state)
		return true
	}),
	types.WithResultCallback(func(state types.ActionState) {
		xlog.Info("Reasoning", state.Reasoning)
		xlog.Info("Action", state.Action)
		xlog.Info("Result", state.Result)
	}),
}

type TestAction struct {
	response map[string]string
}

func (a *TestAction) Plannable() bool {
	return true
}

func (a *TestAction) Run(c context.Context, p types.ActionParams) (types.ActionResult, error) {
	for k, r := range a.response {
		if strings.Contains(strings.ToLower(p.String()), strings.ToLower(k)) {
			return types.ActionResult{Result: r}, nil
		}
	}

	return types.ActionResult{Result: "No match"}, nil
}

func (a *TestAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        "get_weather",
		Description: "get current weather",
		Properties: map[string]jsonschema.Definition{
			"location": {
				Type:        jsonschema.String,
				Description: "The city and state, e.g. San Francisco, CA",
			},
			"unit": {
				Type: jsonschema.String,
				Enum: []string{"celsius", "fahrenheit"},
			},
		},

		Required: []string{"location"},
	}
}

type FakeStoreResultAction struct {
	TestAction
}

func (a *FakeStoreResultAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        "store_results",
		Description: "store results permanently. Use this tool after you have a result you want to keep.",
		Properties: map[string]jsonschema.Definition{
			"term": {
				Type:        jsonschema.String,
				Description: "What to store permanently",
			},
		},

		Required: []string{"term"},
	}
}

type FakeInternetAction struct {
	TestAction
}

func (a *FakeInternetAction) Definition() types.ActionDefinition {
	return types.ActionDefinition{
		Name:        "search_internet",
		Description: "search on internet",
		Properties: map[string]jsonschema.Definition{
			"term": {
				Type:        jsonschema.String,
				Description: "What to search for",
			},
		},

		Required: []string{"term"},
	}
}

var _ = Describe("Agent test", func() {
	Context("jobs", func() {

		BeforeEach(func() {
			Eventually(func() error {
				// test apiURL is working and available
				_, err := http.Get(apiURL + "/readyz")
				return err
			}, "10m", "10s").ShouldNot(HaveOccurred())
		})

		It("pick the correct action", func() {
			agent, err := New(
				WithLLMAPIURL(apiURL),
				WithModel(testModel),
				WithLoopDetectionSteps(3),
				// WithRandomIdentity(),
				WithActions(&TestAction{response: map[string]string{
					"boston": testActionResult,
					"milan":  testActionResult2,
					"paris":  testActionResult3,
				}}),
			)
			Expect(err).ToNot(HaveOccurred())
			go agent.Run()
			defer agent.Stop()

			res := agent.Ask(
				append(debugOptions,
					types.WithText("what's the weather in Boston and Milano? Use celsius units"),
				)...,
			)
			Expect(res.Error).ToNot(HaveOccurred())
			reasons := []string{}
			for _, r := range res.State {
				reasons = append(reasons, r.Result)
			}
			Expect(reasons).To(ContainElement(testActionResult), fmt.Sprint(res))
			Expect(reasons).To(ContainElement(testActionResult2), fmt.Sprint(res))
			reasons = []string{}

			res = agent.Ask(
				append(debugOptions,
					types.WithText("Now I want to know the weather in Paris, always use celsius units"),
				)...)
			for _, r := range res.State {
				reasons = append(reasons, r.Result)
			}
			//Expect(reasons).ToNot(ContainElement(testActionResult), fmt.Sprint(res))
			//Expect(reasons).ToNot(ContainElement(testActionResult2), fmt.Sprint(res))
			Expect(reasons).To(ContainElement(testActionResult3), fmt.Sprint(res))
			// conversation := agent.CurrentConversation()
			// for _, r := range res.State {
			// 	reasons = append(reasons, r.Result)
			// }
			// Expect(len(conversation)).To(Equal(10), fmt.Sprint(conversation))
		})
		It("pick the correct action", func() {
			agent, err := New(
				WithLLMAPIURL(apiURL),
				WithModel(testModel),

				// WithRandomIdentity(),
				WithActions(&TestAction{response: map[string]string{
					"boston": testActionResult,
				},
				}),
			)
			Expect(err).ToNot(HaveOccurred())
			go agent.Run()
			defer agent.Stop()
			res := agent.Ask(
				append(debugOptions,
					types.WithText("can you get the weather in boston? Use celsius units"))...,
			)
			reasons := []string{}
			for _, r := range res.State {
				reasons = append(reasons, r.Result)
			}
			Expect(reasons).To(ContainElement(testActionResult), fmt.Sprint(res))
		})

		It("updates the state with internal actions", func() {
			agent, err := New(
				WithLLMAPIURL(apiURL),
				WithModel(testModel),
				EnableHUD,
				// EnableStandaloneJob,
				// WithRandomIdentity(),
				WithPermanentGoal("I want to learn to play music"),
			)
			Expect(err).ToNot(HaveOccurred())
			go agent.Run()
			defer agent.Stop()

			result := agent.Ask(
				types.WithText("Update your goals such as you want to learn to play the guitar"),
			)
			fmt.Printf("%+v\n", result)
			Expect(result.Error).ToNot(HaveOccurred())
			Expect(agent.State().Goal).To(ContainSubstring("guitar"), fmt.Sprint(agent.State()))
		})

		It("Can generate a plan", func() {
			agent, err := New(
				WithLLMAPIURL(apiURL),
				WithModel(testModel),
				WithLLMAPIKey(apiKeyURL),
				WithTimeout("10m"),
				WithActions(
					actions.NewSearch(map[string]string{}),
				),
				EnablePlanning,
				EnableForceReasoning,
				// EnableStandaloneJob,
				// WithRandomIdentity(),
			)
			Expect(err).ToNot(HaveOccurred())
			go agent.Run()
			defer agent.Stop()

			result := agent.Ask(
				types.WithText("plan a trip to San Francisco from Venice, Italy"),
			)
			Expect(len(result.State)).To(BeNumerically(">", 1))

			actionsExecuted := []string{}
			for _, r := range result.State {
				xlog.Info(r.Result)
				actionsExecuted = append(actionsExecuted, r.Action.Definition().Name.String())
			}
			Expect(actionsExecuted).To(ContainElement("search_internet"), fmt.Sprint(result))
			Expect(actionsExecuted).To(ContainElement("plan"), fmt.Sprint(result))

		})

		It("Can initiate conversations", func() {

			message := openai.ChatCompletionMessage{}
			mu := &sync.Mutex{}
			agent, err := New(
				WithLLMAPIURL(apiURL),
				WithModel(testModel),
				WithLLMAPIKey(apiKeyURL),
				WithNewConversationSubscriber(func(m openai.ChatCompletionMessage) {
					mu.Lock()
					message = m
					mu.Unlock()
				}),
				WithActions(
					actions.NewSearch(map[string]string{}),
				),
				EnablePlanning,
				EnableForceReasoning,
				EnableInitiateConversations,
				EnableStandaloneJob,
				EnableHUD,
				WithPeriodicRuns("1s"),
				WithPermanentGoal("use the new_conversation tool"),
				// EnableStandaloneJob,
				// WithRandomIdentity(),
			)
			Expect(err).ToNot(HaveOccurred())
			go agent.Run()
			defer agent.Stop()

			Eventually(func() string {
				mu.Lock()
				defer mu.Unlock()
				return message.Content
			}, "10m", "10s").ShouldNot(BeEmpty())
		})

		/*
			It("it automatically performs things in the background", func() {
				agent, err := New(
					WithLLMAPIURL(apiURL),
					WithModel(testModel),
					EnableHUD,
					EnableStandaloneJob,
					WithAgentReasoningCallback(func(state ActionCurrentState) bool {
						xlog.Info("Reasoning", state)
						return true
					}),
					WithAgentResultCallback(func(state ActionState) {
						xlog.Info("Reasoning", state.Reasoning)
						xlog.Info("Action", state.Action)
						xlog.Info("Result", state.Result)
					}),
					WithActions(
						&FakeInternetAction{
							TestAction{
								response:
								map[string]string{
									"italy": "The weather in italy is sunny",
								}
							},
						},
						&FakeStoreResultAction{
							TestAction{
								response: []string{
									"Result permanently stored",
								},
							},
						},
					),
					//WithRandomIdentity(),
					WithPermanentGoal("get the weather of all the cities in italy and store the results"),
				)
				Expect(err).ToNot(HaveOccurred())
				go agent.Run()
				defer agent.Stop()
				Eventually(func() string {

					return agent.State().Goal
				}, "10m", "10s").Should(ContainSubstring("weather"), fmt.Sprint(agent.State()))

				Eventually(func() string {
					return agent.State().String()
				}, "10m", "10s").Should(ContainSubstring("store"), fmt.Sprint(agent.State()))

				// result := agent.Ask(
				// 	WithText("Update your goals such as you want to learn to play the guitar"),
				// )
				// fmt.Printf("%+v\n", result)
				// Expect(result.Error).ToNot(HaveOccurred())
				// Expect(agent.State().Goal).To(ContainSubstring("guitar"), fmt.Sprint(agent.State()))
			})
		*/
	})
})
53
core/agent/identity.go
Normal file
@@ -0,0 +1,53 @@
package agent

import (
	"fmt"
	"os"

	"github.com/mudler/LocalAGI/pkg/llm"
)

func (a *Agent) generateIdentity(guidance string) error {
	if guidance == "" {
		guidance = "Generate a random character for roleplaying."
	}

	err := llm.GenerateTypedJSON(a.context.Context, a.client, "Generate a character as JSON data. "+guidance, a.options.LLMAPI.Model, a.options.character.ToJSONSchema(), &a.options.character)
	//err := llm.GenerateJSONFromStruct(a.context.Context, a.client, guidance, a.options.LLMAPI.Model, &a.options.character)
	a.Character = a.options.character
	if err != nil {
		return fmt.Errorf("failed to generate JSON from structure: %v", err)
	}

	if !a.validCharacter() {
		return fmt.Errorf("generated character is not valid ( guidance: %s ): %v", guidance, a.Character.String())
	}
	return nil
}

func (a *Agent) prepareIdentity() error {
	if !a.options.randomIdentity {
		// No identity to generate
		return nil
	}

	if a.options.characterfile == "" {
		return a.generateIdentity(a.options.randomIdentityGuidance)
	}

	if _, err := os.Stat(a.options.characterfile); err == nil {
		// if there is a file, load the character back
		return a.LoadCharacter(a.options.characterfile)
	}

	if err := a.generateIdentity(a.options.randomIdentityGuidance); err != nil {
		return fmt.Errorf("failed to generate identity: %v", err)
	}

	// otherwise save it for next time
	if err := a.SaveCharacter(a.options.characterfile); err != nil {
		return fmt.Errorf("failed to save character: %v", err)
	}

	return nil
}
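Taken together with the options defined later in options.go, `prepareIdentity` gives a persistence round-trip: generate a character once, save it, and reload it on later runs. A hedged usage sketch (the option names exist in this diff; the URL, model, and file path are placeholders):

// Illustrative wiring only: on the first run the character is generated
// from the guidance and saved to wizard.json; subsequent runs load it back.
a, err := agent.New(
	agent.WithLLMAPIURL("http://localhost:8080"), // placeholder endpoint
	agent.WithModel("hermes-2-pro-mistral"),      // placeholder model
	agent.WithRandomIdentity("A wizard who lives in a tower."),
	agent.WithCharacterFile("wizard.json"),
)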
107
core/agent/knowledgebase.go
Normal file
@@ -0,0 +1,107 @@
package agent

import (
	"fmt"
	"os"
	"path/filepath"
	"time"

	"github.com/mudler/LocalAGI/pkg/xlog"
	"github.com/sashabaranov/go-openai"
)

func (a *Agent) knowledgeBaseLookup(conv Messages) {
	if (!a.options.enableKB && !a.options.enableLongTermMemory && !a.options.enableSummaryMemory) ||
		len(conv) <= 0 {
		xlog.Debug("[Knowledge Base Lookup] Disabled, skipping", "agent", a.Character.Name)
		return
	}

	// Walk conversation from bottom to top, and find the first message of the user
	// to use it as a query to the KB
	userMessage := conv.GetLatestUserMessage().Content

	xlog.Info("[Knowledge Base Lookup] Last user message", "agent", a.Character.Name, "message", userMessage, "lastMessage", conv.GetLatestUserMessage())

	if userMessage == "" {
		xlog.Info("[Knowledge Base Lookup] No user message found in conversation", "agent", a.Character.Name)
		return
	}

	results, err := a.options.ragdb.Search(userMessage, a.options.kbResults)
	if err != nil {
		xlog.Info("Error finding similar strings inside KB:", "error", err)
	}

	if len(results) == 0 {
		xlog.Info("[Knowledge Base Lookup] No similar strings found in KB", "agent", a.Character.Name)
		return
	}

	formatResults := ""
	for _, r := range results {
		formatResults += fmt.Sprintf("- %s \n", r)
	}
	xlog.Info("[Knowledge Base Lookup] Found similar strings in KB", "agent", a.Character.Name, "results", formatResults)

	// conv = append(conv,
	// 	openai.ChatCompletionMessage{
	// 		Role:    "system",
	// 		Content: fmt.Sprintf("Given the user input you have the following in memory:\n%s", formatResults),
	// 	},
	// )
	conv = append([]openai.ChatCompletionMessage{
		{
			Role:    "system",
			Content: fmt.Sprintf("Given the user input you have the following in memory:\n%s", formatResults),
		}}, conv...)
}

func (a *Agent) saveConversation(m Messages, prefix string) error {
	if a.options.conversationsPath == "" {
		return nil
	}
	dateTime := time.Now().Format("2006-01-02-15-04-05")
	fileName := a.Character.Name + "-" + dateTime + ".json"
	if prefix != "" {
		fileName = prefix + "-" + fileName
	}
	os.MkdirAll(a.options.conversationsPath, os.ModePerm)
	return m.Save(filepath.Join(a.options.conversationsPath, fileName))
}

func (a *Agent) saveCurrentConversation(conv Messages) {

	if err := a.saveConversation(conv, ""); err != nil {
		xlog.Error("Error saving conversation", "error", err)
	}

	if !a.options.enableLongTermMemory && !a.options.enableSummaryMemory {
		xlog.Debug("Long term memory is disabled", "agent", a.Character.Name)
		return
	}

	xlog.Info("Saving conversation", "agent", a.Character.Name, "conversation size", len(conv))

	if a.options.enableSummaryMemory && len(conv) > 0 {
		msg, err := a.askLLM(a.context.Context, []openai.ChatCompletionMessage{{
			Role:    "user",
			Content: "Summarize the conversation below, keep the highlights as a bullet list:\n" + Messages(conv).String(),
		}}, maxRetries)
		if err != nil {
			xlog.Error("Error summarizing conversation", "error", err)
		}

		if err := a.options.ragdb.Store(msg.Content); err != nil {
			xlog.Error("Error storing into memory", "error", err)
		}
	} else {
		for _, message := range conv {
			if message.Role == "user" {
				if err := a.options.ragdb.Store(message.Content); err != nil {
					xlog.Error("Error storing into memory", "error", err)
				}
			}
		}
	}
}
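The `ragdb` dependency is only exercised through `Search` and `Store` here. A minimal in-memory stand-in for tests could look like the sketch below; the method set is inferred from these two call sites (the `RAGDB` interface itself is not shown in this diff), so treat the signatures as an assumption:

package main

import "strings"

// memoryRAG is an assumed stand-in; the real RAGDB interface is not part of
// this diff, and its shape is inferred from the Search/Store calls above.
type memoryRAG struct{ entries []string }

func (m *memoryRAG) Store(s string) error {
	m.entries = append(m.entries, s)
	return nil
}

func (m *memoryRAG) Search(query string, max int) ([]string, error) {
	out := []string{}
	for _, e := range m.entries {
		// naive substring match in place of a vector similarity search
		if strings.Contains(strings.ToLower(e), strings.ToLower(query)) {
			out = append(out, e)
			if len(out) == max {
				break
			}
		}
	}
	return out, nil
}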
164
core/agent/mcp.go
Normal file
@@ -0,0 +1,164 @@
package agent

import (
	"context"
	"encoding/json"
	"errors"

	mcp "github.com/metoro-io/mcp-golang"
	"github.com/metoro-io/mcp-golang/transport/http"
	"github.com/mudler/LocalAGI/core/types"
	"github.com/mudler/LocalAGI/pkg/xlog"
	"github.com/sashabaranov/go-openai/jsonschema"
)

var _ types.Action = &mcpAction{}

type MCPServer struct {
	URL   string `json:"url"`
	Token string `json:"token"`
}

type mcpAction struct {
	mcpClient       *mcp.Client
	inputSchema     ToolInputSchema
	toolName        string
	toolDescription string
}

func (a *mcpAction) Plannable() bool {
	return true
}

func (m *mcpAction) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
	resp, err := m.mcpClient.CallTool(ctx, m.toolName, params)
	if err != nil {
		xlog.Error("Failed to call tool", "error", err.Error())
		return types.ActionResult{}, err
	}

	xlog.Debug("MCP response", "response", resp)

	textResult := ""
	for _, c := range resp.Content {
		switch c.Type {
		case mcp.ContentTypeText:
			textResult += c.TextContent.Text + "\n"
		case mcp.ContentTypeImage:
			xlog.Error("Image content not supported yet")
		case mcp.ContentTypeEmbeddedResource:
			xlog.Error("Embedded resource content not supported yet")
		}
	}

	return types.ActionResult{
		Result: textResult,
	}, nil
}

func (m *mcpAction) Definition() types.ActionDefinition {
	props := map[string]jsonschema.Definition{}
	dat, err := json.Marshal(m.inputSchema.Properties)
	if err != nil {
		xlog.Error("Failed to marshal input schema", "error", err.Error())
	}
	json.Unmarshal(dat, &props)

	return types.ActionDefinition{
		Name:        types.ActionDefinitionName(m.toolName),
		Description: m.toolDescription,
		Required:    m.inputSchema.Required,
		//Properties: ,
		Properties: props,
	}
}

type ToolInputSchema struct {
	Type       string                 `json:"type"`
	Properties map[string]interface{} `json:"properties,omitempty"`
	Required   []string               `json:"required,omitempty"`
}

func (a *Agent) initMCPActions() error {

	a.mcpActions = nil
	var err error

	generatedActions := types.Actions{}

	for _, mcpServer := range a.options.mcpServers {
		transport := http.NewHTTPClientTransport("/mcp")
		transport.WithBaseURL(mcpServer.URL)
		if mcpServer.Token != "" {
			transport.WithHeader("Authorization", "Bearer "+mcpServer.Token)
		}

		// Create a new client
		client := mcp.NewClient(transport)

		xlog.Debug("Initializing client", "server", mcpServer.URL)
		// Initialize the client
		response, e := client.Initialize(a.context)
		if e != nil {
			xlog.Error("Failed to initialize client", "error", e.Error(), "server", mcpServer)
			if err == nil {
				err = e
			} else {
				err = errors.Join(err, e)
			}
			continue
		}

		xlog.Debug("Client initialized: %v", response.Instructions)

		var cursor *string
		for {
			tools, err := client.ListTools(a.context, cursor)
			if err != nil {
				xlog.Error("Failed to list tools", "error", err.Error())
				return err
			}

			for _, t := range tools.Tools {
				desc := ""
				if t.Description != nil {
					desc = *t.Description
				}

				xlog.Debug("Tool", "mcpServer", mcpServer, "name", t.Name, "description", desc)

				dat, err := json.Marshal(t.InputSchema)
				if err != nil {
					xlog.Error("Failed to marshal input schema", "error", err.Error())
				}

				xlog.Debug("Input schema", "mcpServer", mcpServer, "tool", t.Name, "schema", string(dat))

				// XXX: This is a wild guess, to verify (data types might be incompatible)
				var inputSchema ToolInputSchema
				err = json.Unmarshal(dat, &inputSchema)
				if err != nil {
					xlog.Error("Failed to unmarshal input schema", "error", err.Error())
				}

				// Create a new action with Client + tool
				generatedActions = append(generatedActions, &mcpAction{
					mcpClient:       client,
					toolName:        t.Name,
					inputSchema:     inputSchema,
					toolDescription: desc,
				})
			}

			if tools.NextCursor == nil {
				break // No more pages
			}
			cursor = tools.NextCursor
		}

	}

	a.mcpActions = generatedActions

	return err
}
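With `initMCPActions` in place, exposing a remote MCP server's tools to the agent reduces to passing its URL and token through the `WithMCPServers` option defined in options.go below. A hedged sketch (endpoint and token are placeholders):

a, err := agent.New(
	agent.WithLLMAPIURL("http://localhost:8080"),
	agent.WithModel("hermes-2-pro-mistral"),
	agent.WithMCPServers(agent.MCPServer{
		URL:   "http://localhost:9090", // placeholder MCP endpoint
		Token: "",                      // optional bearer token
	}),
)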
338
core/agent/options.go
Normal file
@@ -0,0 +1,338 @@
package agent

import (
	"context"
	"strings"
	"time"

	"github.com/mudler/LocalAGI/core/types"
	"github.com/sashabaranov/go-openai"
)

type Option func(*options) error

type llmOptions struct {
	APIURL          string
	APIKey          string
	Model           string
	MultimodalModel string
}

type options struct {
	LLMAPI                 llmOptions
	character              Character
	randomIdentityGuidance string
	randomIdentity         bool
	userActions            types.Actions
	enableHUD, standaloneJob, showCharacter, enableKB, enableSummaryMemory, enableLongTermMemory bool

	canStopItself         bool
	initiateConversations bool
	loopDetectionSteps    int
	forceReasoning        bool
	canPlan               bool
	characterfile         string
	statefile             string
	context               context.Context
	permanentGoal         string
	timeout               string
	periodicRuns          time.Duration
	kbResults             int
	ragdb                 RAGDB

	prompts []DynamicPrompt

	systemPrompt string

	// callbacks
	reasoningCallback func(types.ActionCurrentState) bool
	resultCallback    func(types.ActionState)

	conversationsPath string

	mcpServers []MCPServer

	newConversationsSubscribers []func(openai.ChatCompletionMessage)
}

func (o *options) SeparatedMultimodalModel() bool {
	return o.LLMAPI.MultimodalModel != "" && o.LLMAPI.Model != o.LLMAPI.MultimodalModel
}

func defaultOptions() *options {
	return &options{
		periodicRuns: 15 * time.Minute,
		LLMAPI: llmOptions{
			APIURL: "http://localhost:8080",
			Model:  "gpt-4",
		},
		character: Character{
			Name:       "",
			Age:        "",
			Occupation: "",
			Hobbies:    []string{},
			MusicTaste: []string{},
		},
	}
}

func newOptions(opts ...Option) (*options, error) {
	options := defaultOptions()
	for _, o := range opts {
		if err := o(options); err != nil {
			return nil, err
		}
	}
	return options, nil
}

var EnableHUD = func(o *options) error {
	o.enableHUD = true
	return nil
}

var EnableForceReasoning = func(o *options) error {
	o.forceReasoning = true
	return nil
}

var EnableKnowledgeBase = func(o *options) error {
	o.enableKB = true
	o.kbResults = 5
	return nil
}

var CanStopItself = func(o *options) error {
	o.canStopItself = true
	return nil
}

func WithTimeout(timeout string) Option {
	return func(o *options) error {
		o.timeout = timeout
		return nil
	}
}

func WithLoopDetectionSteps(steps int) Option {
	return func(o *options) error {
		o.loopDetectionSteps = steps
		return nil
	}
}

func WithConversationsPath(path string) Option {
	return func(o *options) error {
		o.conversationsPath = path
		return nil
	}
}

func EnableKnowledgeBaseWithResults(results int) Option {
	return func(o *options) error {
		o.enableKB = true
		o.kbResults = results
		return nil
	}
}

func WithNewConversationSubscriber(sub func(openai.ChatCompletionMessage)) Option {
	return func(o *options) error {
		o.newConversationsSubscribers = append(o.newConversationsSubscribers, sub)
		return nil
	}
}

var EnableInitiateConversations = func(o *options) error {
	o.initiateConversations = true
	return nil
}

var EnablePlanning = func(o *options) error {
	o.canPlan = true
	return nil
}

// EnableStandaloneJob is an option to enable the agent
// to run jobs in the background automatically
var EnableStandaloneJob = func(o *options) error {
	o.standaloneJob = true
	return nil
}

var EnablePersonality = func(o *options) error {
	o.showCharacter = true
	return nil
}

var EnableSummaryMemory = func(o *options) error {
	o.enableSummaryMemory = true
	return nil
}

var EnableLongTermMemory = func(o *options) error {
	o.enableLongTermMemory = true
	return nil
}

func WithRAGDB(db RAGDB) Option {
	return func(o *options) error {
		o.ragdb = db
		return nil
	}
}

func WithSystemPrompt(prompt string) Option {
	return func(o *options) error {
		o.systemPrompt = prompt
		return nil
	}
}

func WithMCPServers(servers ...MCPServer) Option {
	return func(o *options) error {
		o.mcpServers = servers
		return nil
	}
}

func WithLLMAPIURL(url string) Option {
	return func(o *options) error {
		o.LLMAPI.APIURL = url
		return nil
	}
}

func WithStateFile(path string) Option {
	return func(o *options) error {
		o.statefile = path
		return nil
	}
}

func WithCharacterFile(path string) Option {
	return func(o *options) error {
		o.characterfile = path
		return nil
	}
}

// WithPrompts adds additional block prompts to the agent
// to be rendered internally in the conversation
// when processing the conversation to the LLM
func WithPrompts(prompts ...DynamicPrompt) Option {
	return func(o *options) error {
		o.prompts = prompts
		return nil
	}
}

// WithDynamicPrompts is a helper function to create dynamic prompts
// Dynamic prompts contains golang code which is executed dynamically
// // to render a prompt to the LLM
// func WithDynamicPrompts(prompts ...map[string]string) Option {
// 	return func(o *options) error {
// 		for _, p := range prompts {
// 			prompt, err := NewDynamicPrompt(p, "")
// 			if err != nil {
// 				return err
// 			}
// 			o.prompts = append(o.prompts, prompt)
// 		}
// 		return nil
// 	}
// }

func WithLLMAPIKey(key string) Option {
	return func(o *options) error {
		o.LLMAPI.APIKey = key
		return nil
	}
}

func WithMultimodalModel(model string) Option {
	return func(o *options) error {
		o.LLMAPI.MultimodalModel = model
		return nil
	}
}

func WithPermanentGoal(goal string) Option {
	return func(o *options) error {
		o.permanentGoal = goal
		return nil
	}
}

func WithPeriodicRuns(duration string) Option {
	return func(o *options) error {
		t, err := time.ParseDuration(duration)
		if err != nil {
			// fall back to 10 minutes instead of silently using the zero duration
			o.periodicRuns, _ = time.ParseDuration("10m")
			return nil
		}
		o.periodicRuns = t
		return nil
	}
}

func WithContext(ctx context.Context) Option {
	return func(o *options) error {
		o.context = ctx
		return nil
	}
}

func WithAgentReasoningCallback(cb func(types.ActionCurrentState) bool) Option {
	return func(o *options) error {
		o.reasoningCallback = cb
		return nil
	}
}

func WithAgentResultCallback(cb func(types.ActionState)) Option {
	return func(o *options) error {
		o.resultCallback = cb
		return nil
	}
}

func WithModel(model string) Option {
	return func(o *options) error {
		o.LLMAPI.Model = model
		return nil
	}
}

func WithCharacter(c Character) Option {
	return func(o *options) error {
		o.character = c
		return nil
	}
}

func FromFile(path string) Option {
	return func(o *options) error {
		c, err := Load(path)
		if err != nil {
			return err
		}
		o.character = *c
		return nil
	}
}

func WithRandomIdentity(guidance ...string) Option {
	return func(o *options) error {
		o.randomIdentityGuidance = strings.Join(guidance, "")
		o.randomIdentity = true
		o.showCharacter = true
		return nil
	}
}

func WithActions(actions ...types.Action) Option {
	return func(o *options) error {
		o.userActions = actions
		return nil
	}
}
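Since every option is a plain `func(*options) error`, enable-flags and parameterized options compose freely in a single `New` call, which is exactly how the tests in agent_test.go build their agents. For reference, a minimal composition (values illustrative):

a, err := agent.New(
	agent.WithLLMAPIURL("http://localhost:8080"),
	agent.WithModel("hermes-2-pro-mistral"),
	agent.EnableHUD,
	agent.EnablePlanning,
	agent.WithLoopDetectionSteps(3),
	agent.WithPermanentGoal("I want to learn to play music"),
)
if err != nil {
	log.Fatal(err) // a failing option surfaces here, before the agent starts
}
go a.Run()
defer a.Stop()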
6
core/agent/prompt.go
Normal file
@@ -0,0 +1,6 @@
package agent

type DynamicPrompt interface {
	Render(a *Agent) (string, error)
	Role() string
}
143
core/agent/state.go
Normal file
@@ -0,0 +1,143 @@
package agent

import (
	"encoding/json"
	"fmt"
	"os"
	"path/filepath"

	"github.com/mudler/LocalAGI/core/action"
	"github.com/sashabaranov/go-openai/jsonschema"
)

// PromptHUD contains
// all information that should be displayed to the LLM
// in the prompts
type PromptHUD struct {
	Character     Character                 `json:"character"`
	CurrentState  action.AgentInternalState `json:"current_state"`
	PermanentGoal string                    `json:"permanent_goal"`
	ShowCharacter bool                      `json:"show_character"`
}

type Character struct {
	Name       string   `json:"name"`
	Age        string   `json:"age"`
	Occupation string   `json:"job_occupation"`
	Hobbies    []string `json:"hobbies"`
	MusicTaste []string `json:"favorites_music_genres"`
	Sex        string   `json:"sex"`
}

func (c *Character) ToJSONSchema() jsonschema.Definition {
	return jsonschema.Definition{
		Type: jsonschema.Object,
		Properties: map[string]jsonschema.Definition{
			"name": {
				Type:        jsonschema.String,
				Description: "The name of the character",
			},
			"age": {
				Type:        jsonschema.String,
				Description: "The age of the character",
			},
			"job_occupation": {
				Type:        jsonschema.String,
				Description: "The occupation of the character",
			},
			"hobbies": {
				Type:        jsonschema.Array,
				Description: "The hobbies of the character",
				Items: &jsonschema.Definition{
					Type: jsonschema.String,
				},
			},
			"favorites_music_genres": {
				Type:        jsonschema.Array,
				Description: "The favorite music genres of the character",
				Items: &jsonschema.Definition{
					Type: jsonschema.String,
				},
			},
			"sex": {
				Type:        jsonschema.String,
				Description: "The character sex (male, female)",
			},
		},
	}
}

func Load(path string) (*Character, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	var c Character
	err = json.Unmarshal(data, &c)
	if err != nil {
		return nil, err
	}
	return &c, nil
}

func (a *Agent) State() action.AgentInternalState {
	return *a.currentState
}

func (a *Agent) LoadState(path string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	return json.Unmarshal(data, a.currentState)
}

func (a *Agent) LoadCharacter(path string) error {
	data, err := os.ReadFile(path)
	if err != nil {
		return err
	}
	return json.Unmarshal(data, &a.Character)
}

func (a *Agent) SaveState(path string) error {
	os.MkdirAll(filepath.Dir(path), 0755)
	data, err := json.Marshal(a.currentState)
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0644)
}

func (a *Agent) SaveCharacter(path string) error {
	os.MkdirAll(filepath.Dir(path), 0755)
	data, err := json.Marshal(a.Character)
	if err != nil {
		return err
	}
	return os.WriteFile(path, data, 0644)
}

func (a *Agent) validCharacter() bool {
	return a.Character.Name != ""
}

const fmtT = `=====================
Name: %s
Age: %s
Occupation: %s
Hobbies: %v
Music taste: %v
=====================`

func (c *Character) String() string {
	return fmt.Sprintf(
		fmtT,
		c.Name,
		c.Age,
		c.Occupation,
		c.Hobbies,
		c.MusicTaste,
	)
}
|
||||||
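As an aside (not part of the diff): a minimal sketch of the Character load/print round trip from state.go above. The file path is a hypothetical example; it assumes a character JSON saved earlier by SaveCharacter.

package main

import (
    "fmt"

    "github.com/mudler/LocalAGI/core/agent"
)

func main() {
    // Hypothetical path produced by Agent.SaveCharacter.
    c, err := agent.Load("pool/wizard.character.json")
    if err != nil {
        panic(err)
    }
    fmt.Println(c.String()) // prints the banner defined by fmtT
}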
55
core/agent/state_test.go
Normal file
@@ -0,0 +1,55 @@
package agent_test

import (
    "net/http"

    . "github.com/mudler/LocalAGI/core/agent"
    . "github.com/onsi/ginkgo/v2"
    . "github.com/onsi/gomega"
)

var _ = Describe("Agent test", func() {
    Context("identity", func() {
        var agent *Agent

        BeforeEach(func() {
            Eventually(func() error {
                // test apiURL is working and available
                _, err := http.Get(apiURL + "/readyz")
                return err
            }, "10m", "10s").ShouldNot(HaveOccurred())
        })

        It("generates all the fields with random data", func() {
            var err error
            agent, err = New(
                WithLLMAPIURL(apiURL),
                WithModel(testModel),
                WithRandomIdentity(),
            )
            Expect(err).ToNot(HaveOccurred())
            By("generating random identity")
            Expect(agent.Character.Name).ToNot(BeEmpty())
            Expect(agent.Character.Age).ToNot(BeZero())
            Expect(agent.Character.Occupation).ToNot(BeEmpty())
            Expect(agent.Character.Hobbies).ToNot(BeEmpty())
            Expect(agent.Character.MusicTaste).ToNot(BeEmpty())
        })
        It("detects an invalid character", func() {
            var err error
            agent, err = New(WithRandomIdentity())
            Expect(err).To(HaveOccurred())
        })
        It("generates all the fields", func() {
            var err error

            agent, err = New(
                WithLLMAPIURL(apiURL),
                WithModel(testModel),
                WithRandomIdentity("A 90-year-old man with a long beard, a wizard, who lives in a tower."),
            )
            Expect(err).ToNot(HaveOccurred())
            Expect(agent.Character.Name).ToNot(BeEmpty())
        })
    })
})
103
core/agent/templates.go
Normal file
@@ -0,0 +1,103 @@
package agent

import (
    "bytes"
    "html/template"
    "time"

    "github.com/mudler/LocalAGI/core/types"
    "github.com/sashabaranov/go-openai"
)

func renderTemplate(templ string, hud *PromptHUD, actions types.Actions, reasoning string) (string, error) {
    // prepare the prompt
    prompt := bytes.NewBuffer([]byte{})

    promptTemplate, err := template.New("pickAction").Parse(templ)
    if err != nil {
        return "", err
    }

    // Get all the actions definitions
    definitions := []types.ActionDefinition{}
    for _, m := range actions {
        definitions = append(definitions, m.Definition())
    }

    err = promptTemplate.Execute(prompt, struct {
        HUD       *PromptHUD
        Actions   []types.ActionDefinition
        Reasoning string
        Messages  []openai.ChatCompletionMessage
        Time      string
    }{
        Actions:   definitions,
        HUD:       hud,
        Reasoning: reasoning,
        Time:      time.Now().Format(time.RFC3339),
    })
    if err != nil {
        return "", err
    }

    return prompt.String(), nil
}

const innerMonologueTemplate = `"This is not a typical conversation between an assistant and a user.
You are thinking out loud by yourself now, and you are evaluating the current situation.
Considering the goal and the persistent goal (if you have one), do an action or decide to plan something for later on. If possible, you might also decide to engage in a conversation with the user by notifying them."`

const hudTemplate = `{{with .HUD }}{{if .ShowCharacter}}The assistant acts like a human, has a character, and its replies and actions might be influenced by it.
{{if .Character.Name}}This is the assistant name: {{.Character.Name}}
{{end}}{{if .Character.Age}}This is the assistant age: {{.Character.Age}}
{{end}}{{if .Character.Occupation}}This is the assistant job: {{.Character.Occupation}}
{{end}}{{if .Character.Hobbies}}These are the assistant's hobbies: {{.Character.Hobbies}}
{{end}}{{if .Character.MusicTaste}}This is the assistant's music taste: {{.Character.MusicTaste}}
{{end}}
{{end}}

This is your current state:
NowDoing: {{if .CurrentState.NowDoing}}{{.CurrentState.NowDoing}}{{else}}Nothing{{end}}
DoingNext: {{if .CurrentState.DoingNext}}{{.CurrentState.DoingNext}}{{else}}Nothing{{end}}
Your permanent goal is: {{if .PermanentGoal}}{{.PermanentGoal}}{{else}}Nothing{{end}}
Your current goal is: {{if .CurrentState.Goal}}{{.CurrentState.Goal}}{{else}}Nothing{{end}}
You have done: {{range .CurrentState.DoneHistory}}{{.}} {{end}}
You have a short memory with: {{range .CurrentState.Memories}}{{.}} {{end}}{{end}}
Current time is {{.Time}}`

const pickSelfTemplate = `You can take any of the following tools:

{{range .Actions -}}
- {{.Name}}: {{.Description }}
{{ end }}

To finish your session, use the "reply" tool with your answer.

Act as a fully autonomous smart AI agent having a character; the character and your state are defined in the message above.
You are now self-evaluating what to do next based on the state in the previous message.
For example, if the permanent goal is to "make a sandwich", you might want to "get the bread" first, and update the state afterwards by calling two tools in sequence.
You can update the short-term goal, the current action, the next action, the history of actions, and the memories.
You can't ask things to the user as you are thinking by yourself. You are autonomous.

{{if .Reasoning}}Reasoning: {{.Reasoning}}{{end}}
` + hudTemplate

const reSelfEvalTemplate = pickSelfTemplate + `

We have already called other tools. Evaluate the current situation and decide if we need to execute other tools.`

const pickActionTemplate = hudTemplate + `
When you have to pick a tool, explain in the reasoning how you would use the tools you'd pick from:

{{range .Actions -}}
- {{.Name}}: {{.Description }}
{{ end }}
To answer back to the user, use the "reply" or the "answer" tool.
Given the text below, decide which action to take and explain the detailed reasoning behind it. For answering without picking a choice, reply with 'none'.

{{if .Reasoning}}Reasoning: {{.Reasoning}}{{end}}
`

const reEvalTemplate = pickActionTemplate + `

We have already called other tools. Evaluate the current situation and decide if we need to execute other tools or answer back with a result.`
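As an aside (not part of the diff): a minimal sketch of how renderTemplate composes a prompt. Since renderTemplate is unexported it would have to live inside the agent package (for example in a test file); the HUD values are invented. Note that the file uses html/template, so quotes in the rendered prompt come out HTML-escaped.

package agent

import (
    "fmt"

    "github.com/mudler/LocalAGI/core/types"
)

func Example_renderTemplate() {
    // Invented HUD values, just enough to exercise the character block.
    hud := &PromptHUD{
        Character:     Character{Name: "Merlin", Occupation: "wizard"},
        ShowCharacter: true,
    }
    out, err := renderTemplate(pickActionTemplate, hud, types.Actions{}, "")
    if err != nil {
        panic(err)
    }
    fmt.Println(out) // the rendered prompt, HUD block included
}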
224
core/sse/sse.go
Normal file
@@ -0,0 +1,224 @@
package sse

import (
    "bufio"
    "fmt"
    "strings"
    "sync"
    "time"

    "github.com/gofiber/fiber/v2"
    "github.com/valyala/fasthttp"
)

type (
    // Listener defines the interface for the receiving end.
    Listener interface {
        ID() string
        Chan() chan Envelope
    }

    // Envelope defines the interface for content that can be broadcast to clients.
    Envelope interface {
        String() string // Represent the envelope contents as a string for transmission.
    }

    // Manager defines the interface for managing clients and broadcasting messages.
    Manager interface {
        Send(message Envelope)
        Handle(ctx *fiber.Ctx, cl Listener)
        Clients() []string
    }

    History interface {
        Add(message Envelope) // Add adds a message to the history.
        Send(c Listener)      // Send sends the history to a client.
    }
)

type Client struct {
    id string
    ch chan Envelope
}

func NewClient(id string) Listener {
    return &Client{
        id: id,
        ch: make(chan Envelope, 50),
    }
}

func (c *Client) ID() string          { return c.id }
func (c *Client) Chan() chan Envelope { return c.ch }

// Message represents a simple message implementation.
type Message struct {
    Event string
    Time  time.Time
    Data  string
}

// NewMessage returns a new message instance.
func NewMessage(data string) *Message {
    return &Message{
        Data: data,
        Time: time.Now(),
    }
}

// String returns the message as a string.
func (m *Message) String() string {
    sb := strings.Builder{}

    if m.Event != "" {
        sb.WriteString(fmt.Sprintf("event: %s\n", m.Event))
    }
    sb.WriteString(fmt.Sprintf("data: %v\n\n", m.Data))

    return sb.String()
}

// WithEvent sets the event name for the message.
func (m *Message) WithEvent(event string) Envelope {
    m.Event = event
    return m
}

// broadcastManager manages the clients and broadcasts messages to them.
type broadcastManager struct {
    clients        sync.Map
    broadcast      chan Envelope
    workerPoolSize int
    messageHistory *history
}

// NewManager initializes and returns a new Manager instance.
func NewManager(workerPoolSize int) Manager {
    manager := &broadcastManager{
        broadcast:      make(chan Envelope),
        workerPoolSize: workerPoolSize,
        messageHistory: newHistory(10),
    }

    manager.startWorkers()

    return manager
}

// Send broadcasts a message to all connected clients.
func (manager *broadcastManager) Send(message Envelope) {
    manager.broadcast <- message
}

// Handle sets up a new client and handles the connection.
func (manager *broadcastManager) Handle(c *fiber.Ctx, cl Listener) {
    manager.register(cl)
    ctx := c.Context()

    ctx.SetContentType("text/event-stream")
    ctx.Response.Header.Set("Cache-Control", "no-cache")
    ctx.Response.Header.Set("Connection", "keep-alive")
    ctx.Response.Header.Set("Access-Control-Allow-Origin", "*")
    ctx.Response.Header.Set("Access-Control-Allow-Headers", "Cache-Control")
    ctx.Response.Header.Set("Access-Control-Allow-Credentials", "true")

    // Send history to the newly connected client
    manager.messageHistory.Send(cl)
    ctx.SetBodyStreamWriter(fasthttp.StreamWriter(func(w *bufio.Writer) {
        for {
            select {
            case msg, ok := <-cl.Chan():
                if !ok {
                    // If the channel is closed, return from the function
                    return
                }
                _, err := fmt.Fprint(w, msg.String())
                if err != nil {
                    // If an error occurs (e.g., client has disconnected), return from the function
                    return
                }

                w.Flush()

            case <-ctx.Done():
                manager.unregister(cl.ID())
                close(cl.Chan())
                return
            }
        }
    }))
}

// Clients lists the connected client IDs.
func (manager *broadcastManager) Clients() []string {
    var clients []string
    manager.clients.Range(func(key, value any) bool {
        id, ok := key.(string)
        if ok {
            clients = append(clients, id)
        }
        return true
    })
    return clients
}

// startWorkers starts worker goroutines for message broadcasting.
func (manager *broadcastManager) startWorkers() {
    for i := 0; i < manager.workerPoolSize; i++ {
        go func() {
            for message := range manager.broadcast {
                manager.clients.Range(func(key, value any) bool {
                    client, ok := value.(Listener)
                    if !ok {
                        return true // Continue iteration
                    }
                    select {
                    case client.Chan() <- message:
                        manager.messageHistory.Add(message)
                    default:
                        // If the client's channel is full, drop the message
                    }
                    return true // Continue iteration
                })
            }
        }()
    }
}

// register adds a client to the manager.
func (manager *broadcastManager) register(client Listener) {
    manager.clients.Store(client.ID(), client)
}

// unregister removes a client from the manager.
func (manager *broadcastManager) unregister(clientID string) {
    manager.clients.Delete(clientID)
}

type history struct {
    messages []Envelope
    maxSize  int // Maximum number of messages to retain
}

func newHistory(maxSize int) *history {
    return &history{
        messages: []Envelope{},
        maxSize:  maxSize,
    }
}

func (h *history) Add(message Envelope) {
    h.messages = append(h.messages, message)
    // Ensure history does not exceed maxSize
    if len(h.messages) > h.maxSize {
        // Remove the oldest messages to fit the maxSize
        h.messages = h.messages[len(h.messages)-h.maxSize:]
    }
}

func (h *history) Send(c Listener) {
    for _, msg := range h.messages {
        c.Chan() <- msg
    }
}
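As an aside (not part of the diff): a minimal sketch wiring the SSE manager above into a Fiber app. The route path, port, and keying clients by IP are invented for illustration.

package main

import (
    "time"

    "github.com/gofiber/fiber/v2"
    "github.com/mudler/LocalAGI/core/sse"
)

func main() {
    app := fiber.New()
    manager := sse.NewManager(5) // 5 broadcast workers

    // One Listener per connection; new clients first receive the history buffer.
    app.Get("/sse", func(c *fiber.Ctx) error {
        manager.Handle(c, sse.NewClient(c.IP()))
        return nil
    })

    // Periodically broadcast a status event to every connected client.
    go func() {
        for {
            time.Sleep(5 * time.Second)
            manager.Send(sse.NewMessage("tick").WithEvent("status"))
        }
    }()

    if err := app.Listen(":3000"); err != nil {
        panic(err)
    }
}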
288
core/state/config.go
Normal file
@@ -0,0 +1,288 @@
package state

import (
    "encoding/json"

    "github.com/mudler/LocalAGI/core/agent"
    "github.com/mudler/LocalAGI/core/types"
    "github.com/mudler/LocalAGI/pkg/config"
)

type ConnectorConfig struct {
    Type   string `json:"type"` // e.g. Slack
    Config string `json:"config"`
}

type ActionsConfig struct {
    Name   string `json:"name"` // e.g. search
    Config string `json:"config"`
}

type DynamicPromptsConfig struct {
    Type   string `json:"type"`
    Config string `json:"config"`
}

// ToMap decodes the JSON config payload; a malformed payload yields an empty map.
func (d DynamicPromptsConfig) ToMap() map[string]string {
    config := map[string]string{}
    json.Unmarshal([]byte(d.Config), &config)
    return config
}

type AgentConfig struct {
    Connector      []ConnectorConfig      `json:"connectors" form:"connectors"`
    Actions        []ActionsConfig        `json:"actions" form:"actions"`
    DynamicPrompts []DynamicPromptsConfig `json:"dynamic_prompts" form:"dynamic_prompts"`
    MCPServers     []agent.MCPServer      `json:"mcp_servers" form:"mcp_servers"`

    Description string `json:"description" form:"description"`

    Model           string `json:"model" form:"model"`
    MultimodalModel string `json:"multimodal_model" form:"multimodal_model"`
    APIURL          string `json:"api_url" form:"api_url"`
    APIKey          string `json:"api_key" form:"api_key"`
    LocalRAGURL     string `json:"local_rag_url" form:"local_rag_url"`
    LocalRAGAPIKey  string `json:"local_rag_api_key" form:"local_rag_api_key"`

    Name                  string `json:"name" form:"name"`
    HUD                   bool   `json:"hud" form:"hud"`
    StandaloneJob         bool   `json:"standalone_job" form:"standalone_job"`
    RandomIdentity        bool   `json:"random_identity" form:"random_identity"`
    InitiateConversations bool   `json:"initiate_conversations" form:"initiate_conversations"`
    CanPlan               bool   `json:"enable_planning" form:"enable_planning"`
    IdentityGuidance      string `json:"identity_guidance" form:"identity_guidance"`
    PeriodicRuns          string `json:"periodic_runs" form:"periodic_runs"`
    PermanentGoal         string `json:"permanent_goal" form:"permanent_goal"`
    EnableKnowledgeBase   bool   `json:"enable_kb" form:"enable_kb"`
    EnableReasoning       bool   `json:"enable_reasoning" form:"enable_reasoning"`
    KnowledgeBaseResults  int    `json:"kb_results" form:"kb_results"`
    LoopDetectionSteps    int    `json:"loop_detection_steps" form:"loop_detection_steps"`
    CanStopItself         bool   `json:"can_stop_itself" form:"can_stop_itself"`
    SystemPrompt          string `json:"system_prompt" form:"system_prompt"`
    LongTermMemory        bool   `json:"long_term_memory" form:"long_term_memory"`
    SummaryLongTermMemory bool   `json:"summary_long_term_memory" form:"summary_long_term_memory"`
}

type AgentConfigMeta struct {
    Fields         []config.Field
    Connectors     []config.FieldGroup
    Actions        []config.FieldGroup
    DynamicPrompts []config.FieldGroup
    MCPServers     []config.Field
}

func NewAgentConfigMeta(
    actionsConfig []config.FieldGroup,
    connectorsConfig []config.FieldGroup,
    dynamicPromptsConfig []config.FieldGroup,
) AgentConfigMeta {
    return AgentConfigMeta{
        Fields: []config.Field{
            {
                Name:         "name",
                Label:        "Name",
                Type:         "text",
                DefaultValue: "",
                Required:     true,
                Tags:         config.Tags{Section: "BasicInfo"},
            },
            {
                Name:         "description",
                Label:        "Description",
                Type:         "textarea",
                DefaultValue: "",
                Tags:         config.Tags{Section: "BasicInfo"},
            },
            {
                Name:         "identity_guidance",
                Label:        "Identity Guidance",
                Type:         "textarea",
                DefaultValue: "",
                Tags:         config.Tags{Section: "BasicInfo"},
            },
            {
                Name:         "random_identity",
                Label:        "Random Identity",
                Type:         "checkbox",
                DefaultValue: false,
                Tags:         config.Tags{Section: "BasicInfo"},
            },
            {
                Name:         "hud",
                Label:        "HUD",
                Type:         "checkbox",
                DefaultValue: false,
                Tags:         config.Tags{Section: "BasicInfo"},
            },
            {
                Name:         "model",
                Label:        "Model",
                Type:         "text",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "multimodal_model",
                Label:        "Multimodal Model",
                Type:         "text",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "api_url",
                Label:        "API URL",
                Type:         "text",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "api_key",
                Label:        "API Key",
                Type:         "password",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "local_rag_url",
                Label:        "Local RAG URL",
                Type:         "text",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "local_rag_api_key",
                Label:        "Local RAG API Key",
                Type:         "password",
                DefaultValue: "",
                Tags:         config.Tags{Section: "ModelSettings"},
            },
            {
                Name:         "enable_kb",
                Label:        "Enable Knowledge Base",
                Type:         "checkbox",
                DefaultValue: false,
                Tags:         config.Tags{Section: "MemorySettings"},
            },
            {
                Name:         "kb_results",
                Label:        "Knowledge Base Results",
                Type:         "number",
                DefaultValue: 5,
                Min:          1,
                Step:         1,
                Tags:         config.Tags{Section: "MemorySettings"},
            },
            {
                Name:         "long_term_memory",
                Label:        "Long Term Memory",
                Type:         "checkbox",
                DefaultValue: false,
                Tags:         config.Tags{Section: "MemorySettings"},
            },
            {
                Name:         "summary_long_term_memory",
                Label:        "Summary Long Term Memory",
                Type:         "checkbox",
                DefaultValue: false,
                Tags:         config.Tags{Section: "MemorySettings"},
            },
            {
                Name:         "system_prompt",
                Label:        "System Prompt",
                Type:         "textarea",
                DefaultValue: "",
                HelpText:     "Instructions that define the agent's behavior and capabilities",
                Tags:         config.Tags{Section: "PromptsGoals"},
            },
            {
                Name:         "permanent_goal",
                Label:        "Permanent Goal",
                Type:         "textarea",
                DefaultValue: "",
                HelpText:     "Long-term objective for the agent to pursue",
                Tags:         config.Tags{Section: "PromptsGoals"},
            },
            {
                Name:         "standalone_job",
                Label:        "Standalone Job",
                Type:         "checkbox",
                DefaultValue: false,
                HelpText:     "Run as a standalone job without user interaction",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "initiate_conversations",
                Label:        "Initiate Conversations",
                Type:         "checkbox",
                DefaultValue: false,
                HelpText:     "Allow agent to start conversations on its own",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "enable_planning",
                Label:        "Enable Planning",
                Type:         "checkbox",
                DefaultValue: false,
                HelpText:     "Enable agent to create and execute plans",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "can_stop_itself",
                Label:        "Can Stop Itself",
                Type:         "checkbox",
                DefaultValue: false,
                HelpText:     "Allow agent to terminate its own execution",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "periodic_runs",
                Label:        "Periodic Runs",
                Type:         "text",
                DefaultValue: "",
                Placeholder:  "10m",
                HelpText:     "Duration for scheduling periodic agent runs",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "enable_reasoning",
                Label:        "Enable Reasoning",
                Type:         "checkbox",
                DefaultValue: false,
                HelpText:     "Enable agent to explain its reasoning process",
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
            {
                Name:         "loop_detection_steps",
                Label:        "Max Loop Detection Steps",
                Type:         "number",
                DefaultValue: 5,
                Min:          1,
                Step:         1,
                Tags:         config.Tags{Section: "AdvancedSettings"},
            },
        },
        MCPServers: []config.Field{
            {
                Name:     "url",
                Label:    "URL",
                Type:     config.FieldTypeText,
                Required: true,
            },
            {
                Name:     "token",
                Label:    "API Key",
                Type:     config.FieldTypeText,
                Required: true,
            },
        },
        DynamicPrompts: dynamicPromptsConfig,
        Connectors:     connectorsConfig,
        Actions:        actionsConfig,
    }
}

type Connector interface {
    AgentResultCallback() func(state types.ActionState)
    AgentReasoningCallback() func(state types.ActionCurrentState) bool
    Start(a *agent.Agent)
}
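As an aside (not part of the diff): a minimal sketch of how an AgentConfig serializes into the shape that pool.json stores, a name-to-config map (the AgentPoolData type defined in pool.go below). All values here are invented.

package main

import (
    "encoding/json"
    "fmt"

    "github.com/mudler/LocalAGI/core/state"
)

func main() {
    cfg := state.AgentConfig{
        Name:                 "researcher", // invented agent name
        Model:                "qwen2.5",    // hypothetical model
        PeriodicRuns:         "10m",
        EnableKnowledgeBase:  true,
        KnowledgeBaseResults: 5,
    }
    // pool.json holds a map of agent name -> AgentConfig.
    data, err := json.MarshalIndent(state.AgentPoolData{cfg.Name: cfg}, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(data))
}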
33
core/state/internal.go
Normal file
@@ -0,0 +1,33 @@
package state

import (
    . "github.com/mudler/LocalAGI/core/agent"
)

type AgentPoolInternalAPI struct {
    *AgentPool
}

func (a *AgentPool) InternalAPI() *AgentPoolInternalAPI {
    return &AgentPoolInternalAPI{a}
}

func (a *AgentPoolInternalAPI) GetAgent(name string) *Agent {
    return a.agents[name]
}

func (a *AgentPoolInternalAPI) AllAgents() []string {
    var agents []string
    for agent := range a.agents {
        agents = append(agents, agent)
    }
    return agents
}

func (a *AgentPoolInternalAPI) GetConfig(name string) *AgentConfig {
    agent, exists := a.pool[name]
    if !exists {
        return nil
    }
    return &agent
}
629
core/state/pool.go
Normal file
@@ -0,0 +1,629 @@
package state

import (
    "context"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "os"
    "path/filepath"
    "sort"
    "strings"
    "sync"
    "time"

    . "github.com/mudler/LocalAGI/core/agent"
    "github.com/mudler/LocalAGI/core/sse"
    "github.com/mudler/LocalAGI/core/types"
    "github.com/mudler/LocalAGI/pkg/llm"
    "github.com/mudler/LocalAGI/pkg/localrag"
    "github.com/mudler/LocalAGI/pkg/utils"
    "github.com/sashabaranov/go-openai"
    "github.com/sashabaranov/go-openai/jsonschema"

    "github.com/mudler/LocalAGI/pkg/xlog"
)

type AgentPool struct {
    sync.Mutex
    file             string
    pooldir          string
    pool             AgentPoolData
    agents           map[string]*Agent
    managers         map[string]sse.Manager
    agentStatus      map[string]*Status
    apiURL, defaultModel, defaultMultimodalModel string
    imageModel, localRAGAPI, localRAGKey, apiKey string
    availableActions func(*AgentConfig) func(ctx context.Context, pool *AgentPool) []types.Action
    connectors       func(*AgentConfig) []Connector
    dynamicPrompt    func(*AgentConfig) []DynamicPrompt
    timeout          string
    conversationLogs string
}

type Status struct {
    ActionResults []types.ActionState
}

func (s *Status) addResult(result types.ActionState) {
    // If we have more than 10 results, remove the oldest one
    if len(s.ActionResults) > 10 {
        s.ActionResults = s.ActionResults[1:]
    }

    s.ActionResults = append(s.ActionResults, result)
}

func (s *Status) Results() []types.ActionState {
    return s.ActionResults
}

type AgentPoolData map[string]AgentConfig

func loadPoolFromFile(path string) (*AgentPoolData, error) {
    data, err := os.ReadFile(path)
    if err != nil {
        return nil, err
    }

    poolData := &AgentPoolData{}
    err = json.Unmarshal(data, poolData)
    return poolData, err
}

func NewAgentPool(
    defaultModel, defaultMultimodalModel, imageModel, apiURL, apiKey, directory string,
    LocalRAGAPI string,
    availableActions func(*AgentConfig) func(ctx context.Context, pool *AgentPool) []types.Action,
    connectors func(*AgentConfig) []Connector,
    promptBlocks func(*AgentConfig) []DynamicPrompt,
    timeout string,
    withLogs bool,
) (*AgentPool, error) {
    // If the pool file exists, try to load the existing pool;
    // otherwise, create a new pool.
    poolfile := filepath.Join(directory, "pool.json")

    conversationPath := ""
    if withLogs {
        conversationPath = filepath.Join(directory, "conversations")
    }

    if _, err := os.Stat(poolfile); err != nil {
        // file does not exist, create a new pool
        return &AgentPool{
            file:                   poolfile,
            pooldir:                directory,
            apiURL:                 apiURL,
            defaultModel:           defaultModel,
            defaultMultimodalModel: defaultMultimodalModel,
            imageModel:             imageModel,
            localRAGAPI:            LocalRAGAPI,
            apiKey:                 apiKey,
            agents:                 make(map[string]*Agent),
            pool:                   make(map[string]AgentConfig),
            agentStatus:            make(map[string]*Status),
            managers:               make(map[string]sse.Manager),
            connectors:             connectors,
            availableActions:       availableActions,
            dynamicPrompt:          promptBlocks,
            timeout:                timeout,
            conversationLogs:       conversationPath,
        }, nil
    }

    poolData, err := loadPoolFromFile(poolfile)
    if err != nil {
        return nil, err
    }
    return &AgentPool{
        file:                   poolfile,
        apiURL:                 apiURL,
        pooldir:                directory,
        defaultModel:           defaultModel,
        defaultMultimodalModel: defaultMultimodalModel,
        imageModel:             imageModel,
        apiKey:                 apiKey,
        agents:                 make(map[string]*Agent),
        managers:               make(map[string]sse.Manager),
        agentStatus:            map[string]*Status{},
        pool:                   *poolData,
        connectors:             connectors,
        localRAGAPI:            LocalRAGAPI,
        dynamicPrompt:          promptBlocks,
        availableActions:       availableActions,
        timeout:                timeout,
        conversationLogs:       conversationPath,
    }, nil
}

func replaceInvalidChars(s string) string {
    s = strings.ReplaceAll(s, "/", "_")
    return strings.ReplaceAll(s, " ", "_")
}

// CreateAgent adds a new agent to the pool and starts it.
// It also saves the state to the file.
func (a *AgentPool) CreateAgent(name string, agentConfig *AgentConfig) error {
    a.Lock()
    defer a.Unlock()
    name = replaceInvalidChars(name)
    agentConfig.Name = name
    if _, ok := a.pool[name]; ok {
        return fmt.Errorf("agent %s already exists", name)
    }
    a.pool[name] = *agentConfig
    if err := a.save(); err != nil {
        return err
    }

    go func(ac AgentConfig) {
        // Create the agent avatar
        if err := createAgentAvatar(a.apiURL, a.apiKey, a.defaultModel, a.imageModel, a.pooldir, ac); err != nil {
            xlog.Error("Failed to create agent avatar", "error", err)
        }
    }(a.pool[name])

    return a.startAgentWithConfig(name, agentConfig)
}

func createAgentAvatar(APIURL, APIKey, model, imageModel, avatarDir string, agent AgentConfig) error {
    client := llm.NewClient(APIKey, APIURL+"/v1", "10m")

    if imageModel == "" {
        return fmt.Errorf("image model not set")
    }

    if model == "" {
        return fmt.Errorf("default model not set")
    }

    imagePath := filepath.Join(avatarDir, "avatars", fmt.Sprintf("%s.png", agent.Name))
    if _, err := os.Stat(imagePath); err == nil {
        // Image already exists
        xlog.Debug("Avatar already exists", "path", imagePath)
        return nil
    }

    var results struct {
        ImagePrompt string `json:"image_prompt"`
    }

    err := llm.GenerateTypedJSON(
        context.Background(),
        llm.NewClient(APIKey, APIURL, "10m"),
        "Generate a prompt that I can use to create a random avatar for the bot '"+agent.Name+"', the description of the bot is: "+agent.Description,
        model,
        jsonschema.Definition{
            Type: jsonschema.Object,
            Properties: map[string]jsonschema.Definition{
                "image_prompt": {
                    Type:        jsonschema.String,
                    Description: "The prompt to generate the image",
                },
            },
            Required: []string{"image_prompt"},
        }, &results)
    if err != nil {
        return fmt.Errorf("failed to generate image prompt: %w", err)
    }

    if results.ImagePrompt == "" {
        xlog.Error("Failed to generate image prompt")
        return fmt.Errorf("failed to generate image prompt")
    }

    req := openai.ImageRequest{
        Prompt:         results.ImagePrompt,
        Model:          imageModel,
        Size:           openai.CreateImageSize256x256,
        ResponseFormat: openai.CreateImageResponseFormatB64JSON,
    }

    ctx, cancel := context.WithTimeout(context.Background(), 120*time.Second)
    defer cancel()

    resp, err := client.CreateImage(ctx, req)
    if err != nil {
        return fmt.Errorf("failed to generate image: %w", err)
    }

    if len(resp.Data) == 0 {
        return fmt.Errorf("failed to generate image")
    }

    imageJson := resp.Data[0].B64JSON

    os.MkdirAll(filepath.Join(avatarDir, "avatars"), 0755)

    // Save the image to the agent directory
    imageData, err := base64.StdEncoding.DecodeString(imageJson)
    if err != nil {
        return err
    }

    return os.WriteFile(imagePath, imageData, 0644)
}

func (a *AgentPool) List() []string {
    a.Lock()
    defer a.Unlock()

    var agents []string
    for agent := range a.pool {
        agents = append(agents, agent)
    }
    // return a sorted list
    sort.SliceStable(agents, func(i, j int) bool {
        return agents[i] < agents[j]
    })
    return agents
}

func (a *AgentPool) GetStatusHistory(name string) *Status {
    a.Lock()
    defer a.Unlock()
    return a.agentStatus[name]
}

func (a *AgentPool) startAgentWithConfig(name string, config *AgentConfig) error {
    manager := sse.NewManager(5)
    ctx := context.Background()
    model := a.defaultModel
    multimodalModel := a.defaultMultimodalModel

    if config.MultimodalModel != "" {
        multimodalModel = config.MultimodalModel
    }

    if config.Model != "" {
        model = config.Model
    }

    if config.PeriodicRuns == "" {
        config.PeriodicRuns = "10m"
    }

    if config.APIURL != "" {
        a.apiURL = config.APIURL
    }

    if config.APIKey != "" {
        a.apiKey = config.APIKey
    }

    if config.LocalRAGURL != "" {
        a.localRAGAPI = config.LocalRAGURL
    }

    if config.LocalRAGAPIKey != "" {
        a.localRAGKey = config.LocalRAGAPIKey
    }

    connectors := a.connectors(config)
    promptBlocks := a.dynamicPrompt(config)
    actions := a.availableActions(config)(ctx, a)
    stateFile, characterFile := a.stateFiles(name)

    actionsLog := []string{}
    for _, action := range actions {
        actionsLog = append(actionsLog, action.Definition().Name.String())
    }

    connectorLog := []string{}
    for _, connector := range connectors {
        connectorLog = append(connectorLog, fmt.Sprintf("%+v", connector))
    }

    xlog.Info(
        "Creating agent",
        "name", name,
        "model", model,
        "api_url", a.apiURL,
        "actions", actionsLog,
        "connectors", connectorLog,
    )

    // dynamicPrompts := []map[string]string{}
    // for _, p := range config.DynamicPrompts {
    //     dynamicPrompts = append(dynamicPrompts, p.ToMap())
    // }

    opts := []Option{
        WithModel(model),
        WithLLMAPIURL(a.apiURL),
        WithContext(ctx),
        WithMCPServers(config.MCPServers...),
        WithPeriodicRuns(config.PeriodicRuns),
        WithPermanentGoal(config.PermanentGoal),
        WithPrompts(promptBlocks...),
        // WithDynamicPrompts(dynamicPrompts...),
        WithCharacter(Character{
            Name: name,
        }),
        WithActions(
            actions...,
        ),
        WithStateFile(stateFile),
        WithCharacterFile(characterFile),
        WithLLMAPIKey(a.apiKey),
        WithTimeout(a.timeout),
        WithRAGDB(localrag.NewWrappedClient(a.localRAGAPI, a.localRAGKey, name)),
        WithAgentReasoningCallback(func(state types.ActionCurrentState) bool {
            xlog.Info(
                "Agent is thinking",
                "agent", name,
                "reasoning", state.Reasoning,
                "action", state.Action.Definition().Name,
                "params", state.Params,
            )

            manager.Send(
                sse.NewMessage(
                    fmt.Sprintf(`Thinking: %s`, utils.HTMLify(state.Reasoning)),
                ).WithEvent("status"),
            )

            for _, c := range connectors {
                if !c.AgentReasoningCallback()(state) {
                    return false
                }
            }
            return true
        }),
        WithSystemPrompt(config.SystemPrompt),
        WithMultimodalModel(multimodalModel),
        WithAgentResultCallback(func(state types.ActionState) {
            a.Lock()
            if _, ok := a.agentStatus[name]; !ok {
                a.agentStatus[name] = &Status{}
            }

            a.agentStatus[name].addResult(state)
            a.Unlock()
            xlog.Debug(
                "Calling agent result callback",
            )

            text := fmt.Sprintf(`Reasoning: %s
Action taken: %+v
Parameters: %+v
Result: %s`,
                state.Reasoning,
                state.ActionCurrentState.Action.Definition().Name,
                state.ActionCurrentState.Params,
                state.Result)
            manager.Send(
                sse.NewMessage(
                    utils.HTMLify(
                        text,
                    ),
                ).WithEvent("status"),
            )

            for _, c := range connectors {
                c.AgentResultCallback()(state)
            }
        }),
    }

    if config.HUD {
        opts = append(opts, EnableHUD)
    }

    if a.conversationLogs != "" {
        opts = append(opts, WithConversationsPath(a.conversationLogs))
    }

    if config.StandaloneJob {
        opts = append(opts, EnableStandaloneJob)
    }

    if config.LongTermMemory {
        opts = append(opts, EnableLongTermMemory)
    }

    if config.SummaryLongTermMemory {
        opts = append(opts, EnableSummaryMemory)
    }

    if config.CanStopItself {
        opts = append(opts, CanStopItself)
    }

    if config.CanPlan {
        opts = append(opts, EnablePlanning)
    }

    if config.InitiateConversations {
        opts = append(opts, EnableInitiateConversations)
    }

    if config.RandomIdentity {
        if config.IdentityGuidance != "" {
            opts = append(opts, WithRandomIdentity(config.IdentityGuidance))
        } else {
            opts = append(opts, WithRandomIdentity())
        }
    }

    if config.EnableKnowledgeBase {
        opts = append(opts, EnableKnowledgeBase)
    }

    if config.EnableReasoning {
        opts = append(opts, EnableForceReasoning)
    }

    if config.KnowledgeBaseResults > 0 {
        opts = append(opts, EnableKnowledgeBaseWithResults(config.KnowledgeBaseResults))
    }

    if config.LoopDetectionSteps > 0 {
        opts = append(opts, WithLoopDetectionSteps(config.LoopDetectionSteps))
    }

    xlog.Info("Starting agent", "name", name, "config", config)

    agent, err := New(opts...)
    if err != nil {
        return err
    }

    a.agents[name] = agent
    a.managers[name] = manager

    go func() {
        if err := agent.Run(); err != nil {
            xlog.Error("Agent stopped", "error", err.Error(), "name", name)
        }
    }()

    xlog.Info("Starting connectors", "name", name, "config", config)

    for _, c := range connectors {
        go c.Start(agent)
    }

    go func() {
        for {
            time.Sleep(1 * time.Second) // Send a HUD update every second
            manager.Send(sse.NewMessage(
                utils.HTMLify(agent.State().String()),
            ).WithEvent("hud"))
        }
    }()

    xlog.Info("Agent started", "name", name)

    return nil
}

// StartAll starts all the agents in the pool.
func (a *AgentPool) StartAll() error {
    a.Lock()
    defer a.Unlock()
    for name, config := range a.pool {
        if a.agents[name] != nil { // Agent already started
            continue
        }
        if err := a.startAgentWithConfig(name, &config); err != nil {
            xlog.Error("Failed to start agent", "name", name, "error", err)
        }
    }
    return nil
}

func (a *AgentPool) StopAll() {
    a.Lock()
    defer a.Unlock()
    for _, agent := range a.agents {
        agent.Stop()
    }
}

func (a *AgentPool) Stop(name string) {
    a.Lock()
    defer a.Unlock()
    a.stop(name)
}

func (a *AgentPool) stop(name string) {
    if agent, ok := a.agents[name]; ok {
        agent.Stop()
    }
}

func (a *AgentPool) Start(name string) error {
    a.Lock()
    defer a.Unlock()
    if agent, ok := a.agents[name]; ok {
        err := agent.Run()
        if err != nil {
            return fmt.Errorf("agent %s failed to start: %w", name, err)
        }
        xlog.Info("Agent started", "name", name)
        return nil
    }
    if config, ok := a.pool[name]; ok {
        return a.startAgentWithConfig(name, &config)
    }

    return fmt.Errorf("agent %s not found", name)
}

func (a *AgentPool) stateFiles(name string) (string, string) {
    stateFile := filepath.Join(a.pooldir, fmt.Sprintf("%s.state.json", name))
    characterFile := filepath.Join(a.pooldir, fmt.Sprintf("%s.character.json", name))

    return stateFile, characterFile
}

func (a *AgentPool) Remove(name string) error {
    a.Lock()
    defer a.Unlock()
    // Cleanup character and state
    stateFile, characterFile := a.stateFiles(name)

    os.Remove(stateFile)
    os.Remove(characterFile)

    a.stop(name)
    delete(a.agents, name)
    delete(a.pool, name)

    // remove avatar
    os.Remove(filepath.Join(a.pooldir, "avatars", fmt.Sprintf("%s.png", name)))

    if err := a.save(); err != nil {
        return err
    }
    return nil
}

func (a *AgentPool) Save() error {
    a.Lock()
    defer a.Unlock()
    return a.save()
}

func (a *AgentPool) save() error {
    data, err := json.MarshalIndent(a.pool, "", " ")
    if err != nil {
        return err
    }
    return os.WriteFile(a.file, data, 0644)
}

func (a *AgentPool) GetAgent(name string) *Agent {
    a.Lock()
    defer a.Unlock()
    return a.agents[name]
}

func (a *AgentPool) AllAgents() []string {
    a.Lock()
    defer a.Unlock()
    var agents []string
    for agent := range a.agents {
        agents = append(agents, agent)
    }
    return agents
}

func (a *AgentPool) GetConfig(name string) *AgentConfig {
    a.Lock()
    defer a.Unlock()
    agent, exists := a.pool[name]
    if !exists {
        return nil
    }
    return &agent
}

func (a *AgentPool) GetManager(name string) sse.Manager {
    a.Lock()
    defer a.Unlock()
    return a.managers[name]
}
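As an aside (not part of the diff): a minimal sketch of the pool lifecycle. Every argument value is an invented placeholder, the three factory callbacks are stubbed out, and it assumes DynamicPrompt is exported from core/agent, as pool.go's dot import suggests.

    // Sketch body; assumes the imports used by pool.go plus the
    // core/state, core/agent, and core/types packages.
    pool, err := state.NewAgentPool(
        "gpt-4", "gpt-4-vision", "stablediffusion", // default/multimodal/image models (hypothetical)
        "http://localhost:8080", "sk---", "./data", // LocalAI URL, API key, state directory
        "http://localhost:8081", // LocalRAG API
        func(*state.AgentConfig) func(context.Context, *state.AgentPool) []types.Action {
            return func(context.Context, *state.AgentPool) []types.Action { return nil } // no custom actions
        },
        func(*state.AgentConfig) []state.Connector { return nil },     // no connectors
        func(*state.AgentConfig) []agent.DynamicPrompt { return nil }, // no dynamic prompts
        "5m", // request timeout
        true, // keep conversation logs under ./data/conversations
    )
    if err != nil {
        panic(err)
    }
    if err := pool.StartAll(); err != nil { // resume every agent stored in pool.json
        panic(err)
    }
    defer pool.StopAll()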
128
core/types/actions.go
Normal file
@@ -0,0 +1,128 @@
package types

import (
    "context"
    "encoding/json"

    "github.com/sashabaranov/go-openai"
    "github.com/sashabaranov/go-openai/jsonschema"
)

type ActionContext struct {
    context.Context
    cancelFunc context.CancelFunc
}

func (ac *ActionContext) Cancel() {
    if ac.cancelFunc != nil {
        ac.cancelFunc()
    }
}

func NewActionContext(ctx context.Context, cancel context.CancelFunc) *ActionContext {
    return &ActionContext{
        Context:    ctx,
        cancelFunc: cancel,
    }
}

type ActionParams map[string]interface{}

type ActionResult struct {
    Job      *Job
    Result   string
    Metadata map[string]interface{}
}

func (ap ActionParams) Read(s string) error {
    err := json.Unmarshal([]byte(s), &ap)
    return err
}

func (ap ActionParams) String() string {
    b, _ := json.Marshal(ap)
    return string(b)
}

func (ap ActionParams) Unmarshal(v interface{}) error {
    b, err := json.Marshal(ap)
    if err != nil {
        return err
    }
    if err := json.Unmarshal(b, v); err != nil {
        return err
    }
    return nil
}

//type ActionDefinition openai.FunctionDefinition

type ActionDefinition struct {
    Properties  map[string]jsonschema.Definition
    Required    []string
    Name        ActionDefinitionName
    Description string
}

type ActionDefinitionName string

func (a ActionDefinitionName) Is(name string) bool {
    return string(a) == name
}

func (a ActionDefinitionName) String() string {
    return string(a)
}

func (a ActionDefinition) ToFunctionDefinition() openai.FunctionDefinition {
    return openai.FunctionDefinition{
        Name:        a.Name.String(),
        Description: a.Description,
        Parameters: jsonschema.Definition{
            Type:       jsonschema.Object,
            Properties: a.Properties,
            Required:   a.Required,
        },
    }
}

// Action is something the agent can do
type Action interface {
    Run(ctx context.Context, action ActionParams) (ActionResult, error)
    Definition() ActionDefinition
    Plannable() bool
}

type Actions []Action

func (a Actions) ToTools() []openai.Tool {
    tools := []openai.Tool{}
    for _, action := range a {
        tools = append(tools, openai.Tool{
            Type:     openai.ToolTypeFunction,
            Function: action.Definition().ToFunctionDefinition(),
        })
    }
    return tools
}

func (a Actions) Find(name string) Action {
    for _, action := range a {
        if action.Definition().Name.Is(name) {
            return action
        }
    }
    return nil
}

type ActionState struct {
    ActionCurrentState
    ActionResult
}

type ActionCurrentState struct {
    Job       *Job
    Action    Action
    Params    ActionParams
    Reasoning string
}
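As an aside (not part of the diff): a minimal sketch of a custom Action implementing the interface above. The "echo" action is invented for illustration; it only uses types defined in this file plus the jsonschema package the file already imports.

package main

import (
    "context"

    "github.com/mudler/LocalAGI/core/types"
    "github.com/sashabaranov/go-openai/jsonschema"
)

// EchoAction is an invented action that just repeats its input.
type EchoAction struct{}

// Compile-time check that EchoAction satisfies the Action interface.
var _ types.Action = &EchoAction{}

func (e *EchoAction) Definition() types.ActionDefinition {
    return types.ActionDefinition{
        Name:        "echo",
        Description: "Repeats the given text",
        Properties: map[string]jsonschema.Definition{
            "text": {Type: jsonschema.String, Description: "The text to repeat"},
        },
        Required: []string{"text"},
    }
}

func (e *EchoAction) Run(ctx context.Context, params types.ActionParams) (types.ActionResult, error) {
    var args struct {
        Text string `json:"text"`
    }
    // ActionParams.Unmarshal round-trips the map through JSON into a typed struct.
    if err := params.Unmarshal(&args); err != nil {
        return types.ActionResult{}, err
    }
    return types.ActionResult{Result: args.Text}, nil
}

// Plannable marks the action as usable inside a plan.
func (e *EchoAction) Plannable() bool { return true }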
200
core/types/job.go
Normal file
200
core/types/job.go
Normal file
@@ -0,0 +1,200 @@
|
|||||||
|
package types
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"github.com/google/uuid"
|
||||||
|
"github.com/sashabaranov/go-openai"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Job is a request to the agent to do something
|
||||||
|
type Job struct {
|
||||||
|
// The job is a request to the agent to do something
|
||||||
|
// It can be a question, a command, or a request to do something
|
||||||
|
// The agent will try to do it, and return a response
|
||||||
|
Result *JobResult
|
||||||
|
ReasoningCallback func(ActionCurrentState) bool
|
||||||
|
ResultCallback func(ActionState)
|
||||||
|
ConversationHistory []openai.ChatCompletionMessage
|
||||||
|
UUID string
|
||||||
|
Metadata map[string]interface{}
|
||||||
|
|
||||||
|
pastActions []*ActionRequest
|
||||||
|
nextAction *Action
|
||||||
|
nextActionParams *ActionParams
|
||||||
|
nextActionReasoning string
|
||||||
|
|
||||||
|
context context.Context
|
||||||
|
cancel context.CancelFunc
|
||||||
|
}
|
||||||
|
|
||||||
|
type ActionRequest struct {
|
||||||
|
Action Action
|
||||||
|
Params *ActionParams
|
||||||
|
}
|
||||||
|
|
||||||
|
type JobOption func(*Job)
|
||||||
|
|
||||||
|
func WithConversationHistory(history []openai.ChatCompletionMessage) JobOption {
|
||||||
|
return func(j *Job) {
|
||||||
|
j.ConversationHistory = history
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func WithReasoningCallback(f func(ActionCurrentState) bool) JobOption {
	return func(r *Job) {
		r.ReasoningCallback = f
	}
}

func WithResultCallback(f func(ActionState)) JobOption {
	return func(r *Job) {
		r.ResultCallback = f
	}
}

func WithMetadata(metadata map[string]interface{}) JobOption {
	return func(j *Job) {
		j.Metadata = metadata
	}
}

// NewJobResult creates a new job result
func NewJobResult() *JobResult {
	r := &JobResult{
		ready: make(chan bool),
	}
	return r
}

func (j *Job) Callback(stateResult ActionCurrentState) bool {
	if j.ReasoningCallback == nil {
		return true
	}
	return j.ReasoningCallback(stateResult)
}

func (j *Job) CallbackWithResult(stateResult ActionState) {
	if j.ResultCallback == nil {
		return
	}
	j.ResultCallback(stateResult)
}

func (j *Job) SetNextAction(action *Action, params *ActionParams, reasoning string) {
	j.nextAction = action
	j.nextActionParams = params
	j.nextActionReasoning = reasoning
}

func (j *Job) AddPastAction(action Action, params *ActionParams) {
	j.pastActions = append(j.pastActions, &ActionRequest{
		Action: action,
		Params: params,
	})
}

func (j *Job) GetPastActions() []*ActionRequest {
	return j.pastActions
}

func (j *Job) GetNextAction() (*Action, *ActionParams, string) {
	return j.nextAction, j.nextActionParams, j.nextActionReasoning
}

func (j *Job) HasNextAction() bool {
	return j.nextAction != nil
}

func (j *Job) ResetNextAction() {
	j.nextAction = nil
	j.nextActionParams = nil
	j.nextActionReasoning = ""
}

func WithTextImage(text, image string) JobOption {
	return func(j *Job) {
		j.ConversationHistory = append(j.ConversationHistory, openai.ChatCompletionMessage{
			Role: "user",
			MultiContent: []openai.ChatMessagePart{
				{
					Type: openai.ChatMessagePartTypeText,
					Text: text,
				},
				{
					Type:     openai.ChatMessagePartTypeImageURL,
					ImageURL: &openai.ChatMessageImageURL{URL: image},
				},
			},
		})
	}
}

func WithText(text string) JobOption {
	return func(j *Job) {
		j.ConversationHistory = append(j.ConversationHistory, openai.ChatCompletionMessage{
			Role:    "user",
			Content: text,
		})
	}
}

func newUUID() string {
	// Generate UUID with google/uuid
	// https://pkg.go.dev/github.com/google/uuid

	// Generate a Version 4 UUID
	u, err := uuid.NewRandom()
	if err != nil {
		log.Fatalf("failed to generate UUID: %v", err)
	}

	return u.String()
}

// NewJob creates a new job
// It is a request to the agent to do something
// It has a JobResult to get the result asynchronously
// To wait for a Job result, use JobResult.WaitResult()
func NewJob(opts ...JobOption) *Job {
	j := &Job{
		Result: NewJobResult(),
		UUID:   newUUID(),
	}
	for _, o := range opts {
		o(j)
	}

	var ctx context.Context
	if j.context == nil {
		ctx = context.Background()
	} else {
		ctx = j.context
	}

	context, cancel := context.WithCancel(ctx)
	j.context = context
	j.cancel = cancel
	return j
}

func WithUUID(uuid string) JobOption {
	return func(j *Job) {
		j.UUID = uuid
	}
}

func WithContext(ctx context.Context) JobOption {
	return func(j *Job) {
		j.context = ctx
	}
}

func (j *Job) Cancel() {
	j.cancel()
}

func (j *Job) GetContext() context.Context {
	return j.context
}
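A minimal, self-contained usage sketch of the functional options above (not part of this change): only NewJob, WithText, WithReasoningCallback and JobResult come from this diff; the module path and the run consumer are illustrative assumptions.

// Usage sketch for the options API above; the import path and the
// run consumer are hypothetical stand-ins for the real agent loop.
package main

import (
	"fmt"

	"github.com/mudler/LocalAGI/core/types" // assumed import path
)

// run stands in for the agent loop that consumes the job; the real
// consumer is expected to eventually call Finish on the job's result.
func run(j *types.Job) {
	j.Result.SetResponse("done")
	j.Result.Finish(nil)
}

func main() {
	job := types.NewJob(
		types.WithText("summarize the latest release notes"),
		types.WithReasoningCallback(func(s types.ActionCurrentState) bool {
			return true // returning false asks the agent to stop early
		}),
	)
	go run(job)
	result := job.Result.WaitResult() // blocks until Finish is called
	fmt.Println(result.Response, result.Error)
}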
67
core/types/result.go
Normal file
@@ -0,0 +1,67 @@
package types

import (
	"sync"

	"github.com/sashabaranov/go-openai"
)

// JobResult is the result of a job
type JobResult struct {
	sync.Mutex
	// The result of a job
	State        []ActionState
	Conversation []openai.ChatCompletionMessage

	Finalizers []func([]openai.ChatCompletionMessage)

	Response string
	Error    error
	ready    chan bool
}

// SetResult appends an action state to the result of a job
func (j *JobResult) SetResult(text ActionState) {
	j.Lock()
	defer j.Unlock()

	j.State = append(j.State, text)
}

// Finish records the error (if any), unblocks WaitResult,
// and runs the registered finalizers exactly once
func (j *JobResult) Finish(e error) {
	j.Lock()
	j.Error = e
	j.Unlock()

	close(j.ready)

	for _, f := range j.Finalizers {
		f(j.Conversation)
	}
	j.Finalizers = []func([]openai.ChatCompletionMessage){}
}

// AddFinalizer adds a finalizer to the job result
func (j *JobResult) AddFinalizer(f func([]openai.ChatCompletionMessage)) {
	j.Lock()
	defer j.Unlock()

	j.Finalizers = append(j.Finalizers, f)
}

// SetResponse sets the response of a job
func (j *JobResult) SetResponse(response string) {
	j.Lock()
	defer j.Unlock()

	j.Response = response
}

// WaitResult waits for the result of a job
func (j *JobResult) WaitResult() *JobResult {
	<-j.ready
	j.Lock()
	defer j.Unlock()
	return j
}
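A sketch of the intended JobResult lifecycle (not part of this change): a worker records states and a response, then Finish unblocks every WaitResult caller and drains the finalizers. Note that Finish must be called exactly once, since a second call would close an already-closed channel. The sketch uses only the API above plus the same go-openai import; the module path is an assumption.

package main

import (
	"fmt"

	"github.com/mudler/LocalAGI/core/types" // assumed import path
	"github.com/sashabaranov/go-openai"
)

func main() {
	res := types.NewJobResult()
	res.AddFinalizer(func(conv []openai.ChatCompletionMessage) {
		// runs inside Finish, once, after Error is recorded
		fmt.Println("conversation length:", len(conv))
	})
	go func() {
		res.SetResponse("42")
		res.Finish(nil) // closes ready; every WaitResult caller unblocks
	}()
	fmt.Println(res.WaitResult().Response)
}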
75
docker-compose.gpu.intel.yaml
Normal file
@@ -0,0 +1,75 @@
services:
  localai:
    # See https://localai.io/basics/container/#standard-container-images for
    # a list of available container images (or build your own with the provided Dockerfile)
    # Available images with CUDA, ROCm, SYCL, Vulkan
    # Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
    # Image list (dockerhub): https://hub.docker.com/r/localai/localai
    image: localai/localai:master-sycl-f32-ffmpeg-core
    command:
      # - rombo-org_rombo-llm-v3.0-qwen-32b # minimum suggested model
      - arcee-agent # (smaller)
      - granite-embedding-107m-multilingual
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
      interval: 60s
      timeout: 10m
      retries: 120
    ports:
      - 8081:8080
    environment:
      - DEBUG=true
      #- LOCALAI_API_KEY=sk-1234567890
    volumes:
      - ./volumes/models:/build/models:cached
      - ./volumes/images:/tmp/generated/images
    devices:
      # On a system with integrated GPU and an Arc 770, this is the Arc 770
      - /dev/dri/card1
      - /dev/dri/renderD129

  localrecall:
    image: quay.io/mudler/localrecall:main
    ports:
      - 8080
    environment:
      - COLLECTION_DB_PATH=/db
      - EMBEDDING_MODEL=granite-embedding-107m-multilingual
      - FILE_ASSETS=/assets
      - OPENAI_API_KEY=sk-1234567890
      - OPENAI_BASE_URL=http://localai:8080
    volumes:
      - ./volumes/localrag/db:/db
      - ./volumes/localrag/assets/:/assets

  localrecall-healthcheck:
    depends_on:
      localrecall:
        condition: service_started
    image: busybox
    command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]

  localagi:
    depends_on:
      localai:
        condition: service_healthy
      localrecall-healthcheck:
        condition: service_completed_successfully
    build:
      context: .
      dockerfile: Dockerfile.webui
    ports:
      - 8080:3000
    image: quay.io/mudler/localagi:master
    environment:
      - LOCALAGI_MODEL=arcee-agent
      - LOCALAGI_LLM_API_URL=http://localai:8080
      #- LOCALAGI_LLM_API_KEY=sk-1234567890
      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
      - LOCALAGI_STATE_DIR=/pool
      - LOCALAGI_TIMEOUT=5m
      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - ./volumes/localagi/:/pool
85
docker-compose.gpu.yaml
Normal file
@@ -0,0 +1,85 @@
services:
  localai:
    # See https://localai.io/basics/container/#standard-container-images for
    # a list of available container images (or build your own with the provided Dockerfile)
    # Available images with CUDA, ROCm, SYCL, Vulkan
    # Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
    # Image list (dockerhub): https://hub.docker.com/r/localai/localai
    image: localai/localai:master-gpu-nvidia-cuda-12
    command:
      - mlabonne_gemma-3-27b-it-abliterated
      - qwen_qwq-32b
      # Other good alternative options:
      # - rombo-org_rombo-llm-v3.0-qwen-32b # minimum suggested model
      # - arcee-agent
      - granite-embedding-107m-multilingual
      - flux.1-dev
      - minicpm-v-2_6
    environment:
      # Enable if you have a single GPU that can't fit all the models
      - LOCALAI_SINGLE_ACTIVE_BACKEND=true
      - DEBUG=true
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
      interval: 10s
      timeout: 20m
      retries: 20
    ports:
      - 8081:8080
    volumes:
      - ./volumes/models:/build/models:cached
      - ./volumes/images:/tmp/generated/images
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: 1
              capabilities: [gpu]

  localrecall:
    image: quay.io/mudler/localrecall:main
    ports:
      - 8080
    environment:
      - COLLECTION_DB_PATH=/db
      - EMBEDDING_MODEL=granite-embedding-107m-multilingual
      - FILE_ASSETS=/assets
      - OPENAI_API_KEY=sk-1234567890
      - OPENAI_BASE_URL=http://localai:8080
    volumes:
      - ./volumes/localrag/db:/db
      - ./volumes/localrag/assets/:/assets

  localrecall-healthcheck:
    depends_on:
      localrecall:
        condition: service_started
    image: busybox
    command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]

  localagi:
    depends_on:
      localai:
        condition: service_healthy
      localrecall-healthcheck:
        condition: service_completed_successfully
    build:
      context: .
      dockerfile: Dockerfile.webui
    ports:
      - 8080:3000
    image: quay.io/mudler/localagi:master
    environment:
      - LOCALAGI_MODEL=qwen_qwq-32b
      - LOCALAGI_LLM_API_URL=http://localai:8080
      #- LOCALAGI_LLM_API_KEY=sk-1234567890
      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
      - LOCALAGI_STATE_DIR=/pool
      - LOCALAGI_TIMEOUT=5m
      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
      - LOCALAGI_MULTIMODAL_MODEL=minicpm-v-2_6
      - LOCALAGI_IMAGE_MODEL=flux.1-dev
    extra_hosts:
      - "host.docker.internal:host-gateway"
    volumes:
      - ./volumes/localagi/:/pool
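Both compose files above gate localagi on LocalAI's /readyz endpoint through the in-container curl healthcheck. The same readiness probe can be run from the host against the published port; a sketch in Go follows, where the URL is an assumption tied to the 8081:8080 port mapping in these files.

// Host-side readiness probe mirroring the compose healthcheck above.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	for {
		resp, err := http.Get("http://localhost:8081/readyz") // assumed host port
		if err == nil {
			ok := resp.StatusCode == http.StatusOK
			resp.Body.Close()
			if ok {
				fmt.Println("localai is up!")
				return
			}
		}
		fmt.Println("Waiting for localai...")
		time.Sleep(time.Second)
	}
}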
@@ -1,31 +1,78 @@
-version: "3.9"
 services:
-  api:
-    image: quay.io/go-skynet/local-ai:master
+  localai:
+    # See https://localai.io/basics/container/#standard-container-images for
+    # a list of available container images (or build your own with the provided Dockerfile)
+    # Available images with CUDA, ROCm, SYCL, Vulkan
+    # Image list (quay.io): https://quay.io/repository/go-skynet/local-ai?tab=tags
+    # Image list (dockerhub): https://hub.docker.com/r/localai/localai
+    image: localai/localai:master-ffmpeg-core
+    command:
+      - arcee-agent # (smaller)
+      - granite-embedding-107m-multilingual
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:8080/readyz"]
-      interval: 1m
-      timeout: 120m
+      interval: 60s
+      timeout: 10m
       retries: 120
     ports:
-      - 8090:8080
-    env_file:
-      - .env
+      - 8081:8080
+    environment:
+      - DEBUG=true
+      #- LOCALAI_API_KEY=sk-1234567890
     volumes:
-      - ./models:/models:cached
-      - ./config:/config:cached
-    command: ["/usr/bin/local-ai" ]
+      - ./volumes/models:/build/models:cached
+      - ./volumes/images:/tmp/generated/images
+
+    # uncomment the following piece if running with Nvidia GPUs
+    # deploy:
+    #   resources:
+    #     reservations:
+    #       devices:
+    #         - driver: nvidia
+    #           count: 1
+    #           capabilities: [gpu]
+
+  localrecall:
+    image: quay.io/mudler/localrecall:main
+    ports:
+      - 8080
+    environment:
+      - COLLECTION_DB_PATH=/db
+      - EMBEDDING_MODEL=granite-embedding-107m-multilingual
+      - FILE_ASSETS=/assets
+      - OPENAI_API_KEY=sk-1234567890
+      - OPENAI_BASE_URL=http://localai:8080
+    volumes:
+      - ./volumes/localrag/db:/db
+      - ./volumes/localrag/assets/:/assets
+
+  localrecall-healthcheck:
+    depends_on:
+      localrecall:
+        condition: service_started
+    image: busybox
+    command: ["sh", "-c", "until wget -q -O - http://localrecall:8080 > /dev/null 2>&1; do echo 'Waiting for localrecall...'; sleep 1; done; echo 'localrecall is up!'"]
+
   localagi:
+    depends_on:
+      localai:
+        condition: service_healthy
+      localrecall-healthcheck:
+        condition: service_completed_successfully
     build:
       context: .
-      dockerfile: Dockerfile
-    devices:
-      - /dev/snd
-    depends_on:
-      api:
-        condition: service_healthy
+      dockerfile: Dockerfile.webui
+    ports:
+      - 8080:3000
+    #image: quay.io/mudler/localagi:master
+    environment:
+      - LOCALAGI_MODEL=arcee-agent
+      - LOCALAGI_LLM_API_URL=http://localai:8080
+      #- LOCALAGI_LLM_API_KEY=sk-1234567890
+      - LOCALAGI_LOCALRAG_URL=http://localrecall:8080
+      - LOCALAGI_STATE_DIR=/pool
+      - LOCALAGI_TIMEOUT=5m
+      - LOCALAGI_ENABLE_CONVERSATIONS_LOGGING=false
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
     volumes:
-      - ./db:/app/db
-      - ./data:/data
-    env_file:
-      - .env
+      - ./volumes/localagi/:/pool
12
example/realtimesst/main.py
Executable file
@@ -0,0 +1,12 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from RealtimeSTT import AudioToTextRecorder

def process_text(text):
    print(text)

if __name__ == '__main__':
    recorder = AudioToTextRecorder(wake_words="jarvis")

    while True:
        recorder.text(process_text)
@@ -1,8 +0,0 @@
FROM python:3.10-bullseye
WORKDIR /app
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY . /app

ENTRYPOINT [ "python", "./main.py" ];
@@ -1,371 +0,0 @@
import openai
#from langchain.embeddings import HuggingFaceEmbeddings
from langchain.embeddings import LocalAIEmbeddings

from langchain.document_loaders import (
    SitemapLoader,
    # GitHubIssuesLoader,
    # GitLoader,
)

import uuid
import sys
from config import config

from queue import Queue
import asyncio
import threading
from localagi import LocalAGI
from loguru import logger
from ascii_magic import AsciiArt
from duckduckgo_search import DDGS
from typing import Dict, List
import os
from langchain.text_splitter import RecursiveCharacterTextSplitter
import discord
import openai
import urllib.request
from datetime import datetime
import json
import os
from io import StringIO
FILE_NAME_FORMAT = '%Y_%m_%d_%H_%M_%S'

EMBEDDINGS_MODEL = config["agent"]["embeddings_model"]
EMBEDDINGS_API_BASE = config["agent"]["embeddings_api_base"]
PERSISTENT_DIR = config["agent"]["persistent_dir"]
MILVUS_HOST = config["agent"]["milvus_host"] if "milvus_host" in config["agent"] else ""
MILVUS_PORT = config["agent"]["milvus_port"] if "milvus_port" in config["agent"] else 0
MEMORY_COLLECTION = config["agent"]["memory_collection"]
DB_DIR = config["agent"]["db_dir"]
MEMORY_CHUNK_SIZE = int(config["agent"]["memory_chunk_size"])
MEMORY_CHUNK_OVERLAP = int(config["agent"]["memory_chunk_overlap"])
MEMORY_RESULTS = int(config["agent"]["memory_results"])
MEMORY_SEARCH_TYPE = config["agent"]["memory_search_type"]

if not os.environ.get("PYSQL_HACK", "false") == "false":
    # these three lines swap the stdlib sqlite3 lib with the pysqlite3 package for chroma
    __import__('pysqlite3')
    import sys
    sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')
if MILVUS_HOST == "":
    from langchain.vectorstores import Chroma
else:
    from langchain.vectorstores import Milvus

embeddings = LocalAIEmbeddings(model=EMBEDDINGS_MODEL,openai_api_base=EMBEDDINGS_API_BASE)

loop = None
channel = None
def call(thing):
    return asyncio.run_coroutine_threadsafe(thing,loop).result()

def ingest(a, agent_actions={}, localagi=None):
    q = json.loads(a)
    chunk_size = MEMORY_CHUNK_SIZE
    chunk_overlap = MEMORY_CHUNK_OVERLAP
    logger.info(">>> ingesting: ")
    logger.info(q)
    documents = []
    sitemap_loader = SitemapLoader(web_path=q["url"])
    text_splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_overlap)
    documents.extend(sitemap_loader.load())
    texts = text_splitter.split_documents(documents)
    if MILVUS_HOST == "":
        db = Chroma.from_documents(texts,embeddings,collection_name=MEMORY_COLLECTION, persist_directory=DB_DIR)
        db.persist()
        db = None
    else:
        Milvus.from_documents(texts,embeddings,collection_name=MEMORY_COLLECTION, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
    return f"Documents ingested"

def create_image(a, agent_actions={}, localagi=None):
    q = json.loads(a)
    logger.info(">>> creating image: ")
    logger.info(q["description"])
    size=f"{q['width']}x{q['height']}"
    response = openai.Image.create(prompt=q["description"], n=1, size=size)
    image_url = response["data"][0]["url"]
    image_name = download_image(image_url)
    image_path = f"{PERSISTENT_DIR}{image_name}"

    file = discord.File(image_path, filename=image_name)
    embed = discord.Embed(title="Generated image")
    embed.set_image(url=f"attachment://{image_name}")

    call(channel.send(file=file, content=f"Here is what I have generated", embed=embed))

    return f"Image created: {response['data'][0]['url']}"

def download_image(url: str):
    file_name = f"{datetime.now().strftime(FILE_NAME_FORMAT)}.jpg"
    full_path = f"{PERSISTENT_DIR}{file_name}"
    urllib.request.urlretrieve(url, full_path)
    return file_name


### Agent capabilities
### These functions are called by the agent to perform actions
###
def save(memory, agent_actions={}, localagi=None):
    q = json.loads(memory)
    logger.info(">>> saving to memories: ")
    logger.info(q["content"])
    if MILVUS_HOST == "":
        chroma_client = Chroma(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, persist_directory=DB_DIR)
    else:
        chroma_client = Milvus(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
    chroma_client.add_texts([q["content"]],[{"id": str(uuid.uuid4())}])
    if MILVUS_HOST == "":
        chroma_client.persist()
    chroma_client = None
    return f"The object was saved permanently to memory."

def search_memory(query, agent_actions={}, localagi=None):
    q = json.loads(query)
    if MILVUS_HOST == "":
        chroma_client = Chroma(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, persist_directory=DB_DIR)
    else:
        chroma_client = Milvus(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
    #docs = chroma_client.search(q["keywords"], "mmr")
    retriever = chroma_client.as_retriever(search_type=MEMORY_SEARCH_TYPE, search_kwargs={"k": MEMORY_RESULTS})

    docs = retriever.get_relevant_documents(q["keywords"])
    text_res="Memories found in the database:\n"

    sources = set()  # To store unique sources

    # Collect unique sources
    for document in docs:
        if "source" in document.metadata:
            sources.add(document.metadata["source"])

    for doc in docs:
        # drop newlines from page_content
        content = doc.page_content.replace("\n", " ")
        content = " ".join(content.split())
        text_res+="- "+content+"\n"

    # Print the relevant sources used for the answer
    for source in sources:
        if source.startswith("http"):
            text_res += "" + source + "\n"

    chroma_client = None
    #if args.postprocess:
    #    return post_process(text_res)
    return text_res
    #return localagi.post_process(text_res)

# write file to disk with content
def save_file(arg, agent_actions={}, localagi=None):
    arg = json.loads(arg)
    file = filename = arg["filename"]
    content = arg["content"]
    # create persistent dir if does not exist
    if not os.path.exists(PERSISTENT_DIR):
        os.makedirs(PERSISTENT_DIR)
    # write the file in the directory specified
    file = os.path.join(PERSISTENT_DIR, filename)

    # Check if the file already exists
    if os.path.exists(file):
        mode = 'a'  # Append mode
    else:
        mode = 'w'  # Write mode

    with open(file, mode) as f:
        f.write(content)

    file = discord.File(file, filename=filename)
    call(channel.send(file=file, content=f"Here is what I have generated"))
    return f"File {file} saved successfully."

def ddg(query: str, num_results: int, backend: str = "api") -> List[Dict[str, str]]:
    """Run query through DuckDuckGo and return metadata.

    Args:
        query: The query to search for.
        num_results: The number of results to return.

    Returns:
        A list of dictionaries with the following keys:
            snippet - The description of the result.
            title - The title of the result.
            link - The link to the result.
    """
    ddgs = DDGS()
    try:
        results = ddgs.text(
            query,
            backend=backend,
        )
        if results is None:
            return [{"Result": "No good DuckDuckGo Search Result was found"}]

        def to_metadata(result: Dict) -> Dict[str, str]:
            if backend == "news":
                return {
                    "date": result["date"],
                    "title": result["title"],
                    "snippet": result["body"],
                    "source": result["source"],
                    "link": result["url"],
                }
            return {
                "snippet": result["body"],
                "title": result["title"],
                "link": result["href"],
            }

        formatted_results = []
        for i, res in enumerate(results, 1):
            if res is not None:
                formatted_results.append(to_metadata(res))
            if len(formatted_results) == num_results:
                break
    except Exception as e:
        logger.error(e)
        return []
    return formatted_results

## Search on duckduckgo
def search_duckduckgo(a, agent_actions={}, localagi=None):
    a = json.loads(a)
    list=ddg(a["query"], 2)

    text_res=""
    for doc in list:
        text_res+=f"""{doc["link"]}: {doc["title"]} {doc["snippet"]}\n"""

    #if args.postprocess:
    #    return post_process(text_res)
    return text_res
    #l = json.dumps(list)
    #return l

### End Agent capabilities
###

### Agent action definitions
agent_actions = {
    "generate_picture": {
        "function": create_image,
        "plannable": True,
        "description": 'For creating a picture, the assistant replies with "generate_picture" and a detailed description, enhancing it with as much detail as possible.',
        "signature": {
            "name": "generate_picture",
            "parameters": {
                "type": "object",
                "properties": {
                    "description": {
                        "type": "string",
                    },
                    "width": {
                        "type": "number",
                    },
                    "height": {
                        "type": "number",
                    },
                },
            }
        },
    },
    "search_internet": {
        "function": search_duckduckgo,
        "plannable": True,
        "description": 'For searching the internet with a query, the assistant replies with the action "search_internet" and the query to search.',
        "signature": {
            "name": "search_internet",
            "description": """For searching internet.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
            }
        },
    },
    "save_file": {
        "function": save_file,
        "plannable": True,
        "description": 'The assistant replies with the action "save_file", the filename and content to save for writing a file to disk permanently. This can be used to store the result of complex actions locally.',
        "signature": {
            "name": "save_file",
            "description": """For saving a file to disk with content.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "filename": {
                        "type": "string",
                        "description": "information to save"
                    },
                    "content": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
            }
        },
    },
    "ingest": {
        "function": ingest,
        "plannable": True,
        "description": 'The assistant replies with the action "ingest" when there is an url to a sitemap to ingest memories from.',
        "signature": {
            "name": "ingest",
            "description": """Save or store informations into memory.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "url": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
                "required": ["url"]
            }
        },
    },
    "save_memory": {
        "function": save,
        "plannable": True,
        "description": 'The assistant replies with the action "save_memory" and the string to remember or store an information that thinks it is relevant permanently.',
        "signature": {
            "name": "save_memory",
            "description": """Save or store informations into memory.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "content": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
                "required": ["content"]
            }
        },
    },
    "search_memory": {
        "function": search_memory,
        "plannable": True,
        "description": 'The assistant replies with the action "search_memory" for searching between its memories with a query term.',
        "signature": {
            "name": "search_memory",
            "description": """Search in memory""",
            "parameters": {
                "type": "object",
                "properties": {
                    "keywords": {
                        "type": "string",
                        "description": "reasoning behind the intent"
                    },
                },
                "required": ["keywords"]
            }
        },
    },
}
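The removed bots declare their actions as raw JSON-schema signatures like the ones above. For comparison, a sketch of the same search_internet signature restated against the github.com/sashabaranov/go-openai types the new Go core imports; the query description, left as a copy-pasted "information to save" in the original, is reworded here, and the exact field layout may vary with the go-openai version.

// Sketch only: the "search_internet" action signature above,
// expressed as a go-openai function definition.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/sashabaranov/go-openai"
)

func main() {
	searchInternet := openai.FunctionDefinition{
		Name:        "search_internet",
		Description: "For searching internet.",
		Parameters: map[string]any{
			"type": "object",
			"properties": map[string]any{
				"query": map[string]any{
					"type":        "string",
					"description": "the query to search for",
				},
			},
		},
	}
	out, _ := json.MarshalIndent(searchInternet, "", "  ")
	fmt.Println(string(out))
}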
@@ -1,31 +0,0 @@
[discord]
server_id =
api_key =

[openai]
api_key = sl-d-d-d

[settings]
default_size = 1024x1024
file_path = images/
file_name_format = %Y_%m_%d_%H_%M_%S

[agent]
llm_model = gpt-4
tts_model = en-us-kathleen-low.onnx
tts_api_base = http://api:8080
functions_model = functions
api_base = http://api:8080
stablediffusion_api_base = http://api:8080
stablediffusion_model = stablediffusion
embeddings_model = all-MiniLM-L6-v2
embeddings_api_base = http://api:30316/v1
persistent_dir = /tmp/data
db_dir = /tmp/data/db
milvus_host =
milvus_port =
memory_collection = localai
memory_chunk_size = 600
memory_chunk_overlap = 110
memory_results = 3
memory_search_type = mmr
@@ -1,5 +0,0 @@
from configparser import ConfigParser

config_file = "config.ini"
config = ConfigParser(interpolation=None)
config.read(config_file)
@@ -1,6 +0,0 @@
#!/bin/bash

pip uninstall hnswlib chromadb-hnswlib -y
pip install hnswlib chromadb-hnswlib
cd /app
python3 /app/main.py
@@ -1,292 +0,0 @@
"""
This is a discord bot for generating images using OpenAI's DALL-E

Author: Stefan Rial
YouTube: https://youtube.com/@StefanRial
GitHub: https://github.com/StefanRial/ClaudeBot
E-Mail: mail.stefanrial@gmail.com
"""

from config import config
import os

OPENAI_API_KEY = config["openai"][str("api_key")]

if OPENAI_API_KEY == "":
    OPENAI_API_KEY = "foo"
if "OPENAI_API_BASE" not in os.environ:
    os.environ["OPENAI_API_BASE"] = config["agent"]["api_base"]
os.environ["OPENAI_API_KEY"] = OPENAI_API_KEY
import openai

import discord

import urllib.request
from datetime import datetime
from queue import Queue
import agent
from agent import agent_actions
from localagi import LocalAGI
import asyncio
import threading
from discord import app_commands
import functools
import typing

SERVER_ID = config["discord"]["server_id"]
DISCORD_API_KEY = config["discord"][str("api_key")]
OPENAI_ORG = config["openai"][str("organization")]


FILE_PATH = config["settings"][str("file_path")]
FILE_NAME_FORMAT = config["settings"][str("file_name_format")]
CRITIC = config["settings"]["critic"] if "critic" in config["settings"] else False
SIZE_LARGE = "1024x1024"
SIZE_MEDIUM = "512x512"
SIZE_SMALL = "256x256"
SIZE_DEFAULT = config["settings"][str("default_size")]

GUILD = discord.Object(id=SERVER_ID)

if not os.path.isdir(FILE_PATH):
    os.mkdir(FILE_PATH)


class Client(discord.Client):
    def __init__(self, *, intents: discord.Intents):
        super().__init__(intents=intents)
        self.tree = app_commands.CommandTree(self)

    async def setup_hook(self):
        self.tree.copy_global_to(guild=GUILD)
        await self.tree.sync(guild=GUILD)


claude_intents = discord.Intents.default()
claude_intents.messages = True
claude_intents.message_content = True
client = Client(intents=claude_intents)

openai.organization = OPENAI_ORG
openai.api_key = OPENAI_API_KEY
openai.Model.list()


async def close_thread(thread: discord.Thread):
    await thread.edit(name="closed")
    await thread.send(
        embed=discord.Embed(
            description="**Thread closed** - Context limit reached, closing...",
            color=discord.Color.blue(),
        )
    )
    await thread.edit(archived=True, locked=True)

@client.event
async def on_ready():
    print(f"We have logged in as {client.user}")

def diff(history, processed):
    return [item for item in processed if item not in history]

def analyze_history(history, processed, callback, channel):
    diff_list = diff(history, processed)
    for item in diff_list:
        if item["role"] == "function":
            content = item["content"]
            # Function result
            callback(channel.send(f"⚙️ Processed: {content}"))
        if item["role"] == "assistant" and "function_call" in item:
            function_name = item["function_call"]["name"]
            function_parameters = item["function_call"]["arguments"]
            # Function call
            callback(channel.send(f"⚙️ Called: {function_name} with {function_parameters}"))

def run_localagi_thread_history(history, message, thread, loop):
    agent.channel = message.channel
    def call(thing):
        return asyncio.run_coroutine_threadsafe(thing,loop).result()
    sent_message = call(thread.send(f"⚙️ LocalAGI starts"))

    user = message.author
    def action_callback(name, parameters):
        call(sent_message.edit(content=f"⚙️ Calling function '{name}' with {parameters}"))
    def reasoning_callback(name, reasoning):
        call(sent_message.edit(content=f"🤔 I'm thinking... '{reasoning}' (calling '{name}'), please wait.."))

    localagi = LocalAGI(
        agent_actions=agent_actions,
        llm_model=config["agent"]["llm_model"],
        tts_model=config["agent"]["tts_model"],
        action_callback=action_callback,
        reasoning_callback=reasoning_callback,
        tts_api_base=config["agent"]["tts_api_base"],
        functions_model=config["agent"]["functions_model"],
        api_base=config["agent"]["api_base"],
        stablediffusion_api_base=config["agent"]["stablediffusion_api_base"],
        stablediffusion_model=config["agent"]["stablediffusion_model"],
    )
    # remove bot ID from the message content
    message.content = message.content.replace(f"<@{client.user.id}>", "")
    conversation_history = localagi.evaluate(
        message.content,
        history,
        subtaskContext=True,
        critic=CRITIC,
    )

    analyze_history(history, conversation_history, call, thread)
    call(sent_message.edit(content=f"<@{user.id}> {conversation_history[-1]['content']}"))

def run_localagi_message(message, loop):
    agent.channel = message.channel
    def call(thing):
        return asyncio.run_coroutine_threadsafe(thing,loop).result()
    sent_message = call(message.channel.send(f"⚙️ LocalAGI starts"))

    user = message.author
    def action_callback(name, parameters):
        call(sent_message.edit(content=f"⚙️ Calling function '{name}' with {parameters}"))
    def reasoning_callback(name, reasoning):
        call(sent_message.edit(content=f"🤔 I'm thinking... '{reasoning}' (calling '{name}'), please wait.."))

    localagi = LocalAGI(
        agent_actions=agent_actions,
        llm_model=config["agent"]["llm_model"],
        tts_model=config["agent"]["tts_model"],
        action_callback=action_callback,
        reasoning_callback=reasoning_callback,
        tts_api_base=config["agent"]["tts_api_base"],
        functions_model=config["agent"]["functions_model"],
        api_base=config["agent"]["api_base"],
        stablediffusion_api_base=config["agent"]["stablediffusion_api_base"],
        stablediffusion_model=config["agent"]["stablediffusion_model"],
    )
    # remove bot ID from the message content
    message.content = message.content.replace(f"<@{client.user.id}>", "")

    conversation_history = localagi.evaluate(
        message.content,
        [],
        critic=CRITIC,
        subtaskContext=True,
    )
    analyze_history([], conversation_history, call, message.channel)
    call(sent_message.edit(content=f"<@{user.id}> {conversation_history[-1]['content']}"))

def run_localagi(interaction, prompt, loop):
    agent.channel = interaction.channel

    def call(thing):
        return asyncio.run_coroutine_threadsafe(thing,loop).result()

    user = interaction.user
    embed = discord.Embed(
        description=f"<@{user.id}> wants to chat! 🤖💬",
        color=discord.Color.green(),
    )
    embed.add_field(name=user.name, value=prompt)

    call(interaction.response.send_message(embed=embed))
    response = call(interaction.original_response())

    # create the thread
    thread = call(response.create_thread(
        name=prompt,
        slowmode_delay=1,
        reason="gpt-bot",
        auto_archive_duration=60,
    ))
    thread.typing()

    sent_message = call(thread.send(f"⚙️ LocalAGI starts"))
    messages = []
    def action_callback(name, parameters):
        call(sent_message.edit(content=f"⚙️ Calling function '{name}' with {parameters}"))
    def reasoning_callback(name, reasoning):
        call(sent_message.edit(content=f"🤔 I'm thinking... '{reasoning}' (calling '{name}'), please wait.."))

    localagi = LocalAGI(
        agent_actions=agent_actions,
        llm_model=config["agent"]["llm_model"],
        tts_model=config["agent"]["tts_model"],
        action_callback=action_callback,
        reasoning_callback=reasoning_callback,
        tts_api_base=config["agent"]["tts_api_base"],
        functions_model=config["agent"]["functions_model"],
        api_base=config["agent"]["api_base"],
        stablediffusion_api_base=config["agent"]["stablediffusion_api_base"],
        stablediffusion_model=config["agent"]["stablediffusion_model"],
    )
    # remove bot ID from the message content
    prompt = prompt.replace(f"<@{client.user.id}>", "")

    conversation_history = localagi.evaluate(
        prompt,
        messages,
        subtaskContext=True,
        critic=CRITIC,
    )
    analyze_history(messages, conversation_history, call, interaction.channel)
    call(sent_message.edit(content=f"<@{user.id}> {conversation_history[-1]['content']}"))

@client.tree.command()
@app_commands.describe(prompt="Ask me anything!")
async def localai(interaction: discord.Interaction, prompt: str):
    loop = asyncio.get_running_loop()
    threading.Thread(target=run_localagi, args=[interaction, prompt,loop]).start()

# https://github.com/openai/gpt-discord-bot/blob/1161634a59c6fb642e58edb4f4fa1a46d2883d3b/src/utils.py#L15
def discord_message_to_message(message):
    if (
        message.type == discord.MessageType.thread_starter_message
        and message.reference.cached_message
        and len(message.reference.cached_message.embeds) > 0
        and len(message.reference.cached_message.embeds[0].fields) > 0
    ):
        field = message.reference.cached_message.embeds[0].fields[0]
        if field.value:
            return { "role": "user", "content": field.value }
    else:
        if message.content:
            return { "role": "user", "content": message.content }
    return None

@client.event
async def on_ready():
    loop = asyncio.get_running_loop()
    agent.loop = loop

@client.event
async def on_message(message):
    # ignore messages from the bot
    if message.author == client.user:
        return
    loop = asyncio.get_running_loop()
    # ignore messages not in a thread
    channel = message.channel
    if not isinstance(channel, discord.Thread) and client.user.mentioned_in(message):
        threading.Thread(target=run_localagi_message, args=[message,loop]).start()
        return
    if not isinstance(channel, discord.Thread):
        return
    # ignore threads not created by the bot
    thread = channel
    if thread.owner_id != client.user.id:
        return

    if thread.message_count > 5:
        # too many messages, no longer going to reply
        await close_thread(thread=thread)
        return

    channel_messages = [
        discord_message_to_message(message)
        async for message in thread.history(limit=5)
    ]
    channel_messages = [x for x in channel_messages if x is not None]
    channel_messages.reverse()
    threading.Thread(target=run_localagi_thread_history, args=[channel_messages[:-1],message,thread,loop]).start()

client.run(DISCORD_API_KEY)
@@ -1,11 +0,0 @@
discord
openai
git+https://github.com/mudler/LocalAGI
ascii-magic
loguru
duckduckgo_search==4.1.1
chromadb
pysqlite3-binary
langchain
beautifulsoup4
pymilvus
@@ -1,21 +0,0 @@
SLACK_APP_TOKEN=xapp-
SLACK_BOT_TOKEN=xoxb-
OPENAI_API_KEY=fake

OPENAI_SYSTEM_TEXT=Default System Text
OPENAI_TIMEOUT_SECONDS=30
OPENAI_MODEL=gpt-3.5-turbo
USE_SLACK_LANGUAGE=true
SLACK_APP_LOG_LEVEL=DEBUG
TRANSLATE_MARKDOWN=false
OPENAI_API_BASE=http://localhost:8080/v1
EMBEDDINGS_MODEL=all-MiniLM-L6-v2
EMBEDDINGS_API_BASE=http://localhost:8080/v1
LOCALAI_API_BASE=http://localhost:8080/v1
TTS_API_BASE=http://localhost:8080/v1
IMAGES_API_BASE=http://localhost:8080/v1
STABLEDIFFUSION_MODEL=dreamshaper
FUNCTIONS_MODEL=gpt-3.5-turbo
LLM_MODEL=gpt-3.5-turbo
TTS_MODEL=en-us-kathleen-low.onnx
PERSISTENT_DIR=/data
@@ -1,17 +0,0 @@
FROM python:3.11.3-slim-buster
WORKDIR /app/
COPY requirements.txt /app/

RUN apt-get update && apt-get install build-essential git -y
RUN pip install -U pip && pip install -r requirements.txt
COPY *.py /app/
COPY *.sh /app/
RUN mkdir /app/app/
COPY app/*.py /app/app/
ENTRYPOINT /app/entrypoint.sh

# docker build . -t your-repo/chat-gpt-in-slack
# export SLACK_APP_TOKEN=xapp-...
# export SLACK_BOT_TOKEN=xoxb-...
# export OPENAI_API_KEY=sk-...
# docker run -e SLACK_APP_TOKEN=$SLACK_APP_TOKEN -e SLACK_BOT_TOKEN=$SLACK_BOT_TOKEN -e OPENAI_API_KEY=$OPENAI_API_KEY -it your-repo/chat-gpt-in-slack
@@ -1,21 +0,0 @@
The MIT License (MIT)

Copyright (c) Slack Technologies, LLC

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,396 +0,0 @@
|
|||||||
import openai
|
|
||||||
#from langchain.embeddings import HuggingFaceEmbeddings
|
|
||||||
from langchain.embeddings import LocalAIEmbeddings
|
|
||||||
|
|
||||||
from langchain.document_loaders import (
|
|
||||||
SitemapLoader,
|
|
||||||
# GitHubIssuesLoader,
|
|
||||||
# GitLoader,
|
|
||||||
)
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from app.env import *
|
|
||||||
from queue import Queue
|
|
||||||
import asyncio
|
|
||||||
import threading
|
|
||||||
from localagi import LocalAGI
|
|
||||||
|
|
||||||
from ascii_magic import AsciiArt
|
|
||||||
from duckduckgo_search import DDGS
|
|
||||||
from typing import Dict, List
|
|
||||||
import os
|
|
||||||
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
|
||||||
import openai
|
|
||||||
import urllib.request
|
|
||||||
from datetime import datetime
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
from io import StringIO
|
|
||||||
FILE_NAME_FORMAT = '%Y_%m_%d_%H_%M_%S'
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if not os.environ.get("PYSQL_HACK", "false") == "false":
|
|
||||||
# these three lines swap the stdlib sqlite3 lib with the pysqlite3 package for chroma
|
|
||||||
__import__('pysqlite3')
|
|
||||||
import sys
|
|
||||||
sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')
|
|
||||||
if MILVUS_HOST == "":
|
|
||||||
from langchain.vectorstores import Chroma
|
|
||||||
else:
|
|
||||||
from langchain.vectorstores import Milvus
|
|
||||||
|
|
||||||
embeddings = LocalAIEmbeddings(model=EMBEDDINGS_MODEL,openai_api_base=EMBEDDINGS_API_BASE)
|
|
||||||
|
|
||||||
loop = None
|
|
||||||
channel = None
|
|
||||||
def call(thing):
|
|
||||||
return asyncio.run_coroutine_threadsafe(thing,loop).result()
|
|
||||||
|
|
||||||
def ingest(a, agent_actions={}, localagi=None):
|
|
||||||
q = json.loads(a)
|
|
||||||
chunk_size = MEMORY_CHUNK_SIZE
|
|
||||||
chunk_overlap = MEMORY_CHUNK_OVERLAP
|
|
||||||
print(">>> ingesting: ")
|
|
||||||
print(q)
|
|
||||||
documents = []
|
|
||||||
sitemap_loader = SitemapLoader(web_path=q["url"])
|
|
||||||
text_splitter = RecursiveCharacterTextSplitter(chunk_size=chunk_size, chunk_overlap=chunk_overlap)
|
|
||||||
documents.extend(sitemap_loader.load())
|
|
||||||
texts = text_splitter.split_documents(documents)
|
|
||||||
if MILVUS_HOST == "":
|
|
||||||
db = Chroma.from_documents(texts,embeddings,collection_name=MEMORY_COLLECTION, persist_directory=PERSISTENT_DIR)
|
|
||||||
db.persist()
|
|
||||||
db = None
|
|
||||||
else:
|
|
||||||
Milvus.from_documents(texts,embeddings,collection_name=MEMORY_COLLECTION, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
|
|
||||||
return f"Documents ingested"
|
|
||||||
# def create_image(a, agent_actions={}, localagi=None):
|
|
||||||
# """
|
|
||||||
# Create an image based on a description using OpenAI's API.
|
|
||||||
|
|
||||||
# Args:
|
|
||||||
# a (str): A JSON string containing the description, width, and height for the image to be created.
|
|
||||||
# agent_actions (dict, optional): A dictionary of agent actions. Defaults to {}.
|
|
||||||
# localagi (LocalAGI, optional): An instance of the LocalAGI class. Defaults to None.
|
|
||||||
|
|
||||||
# Returns:
|
|
||||||
# str: A string containing the URL of the created image.
|
|
||||||
# """
|
|
||||||
# q = json.loads(a)
|
|
||||||
# print(">>> creating image: ")
|
|
||||||
# print(q["description"])
|
|
||||||
# size=f"{q['width']}x{q['height']}"
|
|
||||||
# response = openai.Image.create(prompt=q["description"], n=1, size=size)
|
|
||||||
# image_url = response["data"][0]["url"]
|
|
||||||
# image_name = download_image(image_url)
|
|
||||||
# image_path = f"{PERSISTENT_DIR}{image_name}"
|
|
||||||
|
|
||||||
# file = discord.File(image_path, filename=image_name)
|
|
||||||
# embed = discord.Embed(title="Generated image")
|
|
||||||
# embed.set_image(url=f"attachment://{image_name}")
|
|
||||||
|
|
||||||
# call(channel.send(file=file, content=f"Here is what I have generated", embed=embed))
|
|
||||||
|
|
||||||
# return f"Image created: {response['data'][0]['url']}"
|
|
||||||
def download_image(url: str):
|
|
||||||
file_name = f"{datetime.now().strftime(FILE_NAME_FORMAT)}.jpg"
|
|
||||||
full_path = f"{PERSISTENT_DIR}{file_name}"
|
|
||||||
urllib.request.urlretrieve(url, full_path)
|
|
||||||
return file_name
|
|
||||||
|
|
||||||
|
|
||||||
### Agent capabilities
|
|
||||||
### These functions are called by the agent to perform actions
|
|
||||||
###
|
|
||||||
def save(memory, agent_actions={}, localagi=None):
|
|
||||||
q = json.loads(memory)
|
|
||||||
print(">>> saving to memories: ")
|
|
||||||
print(q["content"])
|
|
||||||
if MILVUS_HOST == "":
|
|
||||||
chroma_client = Chroma(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, persist_directory=PERSISTENT_DIR)
|
|
||||||
else:
|
|
||||||
chroma_client = Milvus(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
|
|
||||||
chroma_client.add_texts([q["content"]],[{"id": str(uuid.uuid4())}])
|
|
||||||
if MILVUS_HOST == "":
|
|
||||||
chroma_client.persist()
|
|
||||||
chroma_client = None
|
|
||||||
return f"The object was saved permanently to memory."
|
|
||||||
|
|
||||||
def search_memory(query, agent_actions={}, localagi=None):
|
|
||||||
q = json.loads(query)
|
|
||||||
if MILVUS_HOST == "":
|
|
||||||
chroma_client = Chroma(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, persist_directory=PERSISTENT_DIR)
|
|
||||||
else:
|
|
||||||
chroma_client = Milvus(collection_name=MEMORY_COLLECTION,embedding_function=embeddings, connection_args={"host": MILVUS_HOST, "port": MILVUS_PORT})
|
|
||||||
#docs = chroma_client.search(q["keywords"], "mmr")
|
|
||||||
retriever = chroma_client.as_retriever(search_type=MEMORY_SEARCH_TYPE, search_kwargs={"k": MEMORY_RESULTS})
|
|
||||||
|
|
||||||
docs = retriever.get_relevant_documents(q["keywords"])
|
|
||||||
text_res="Memories found in the database:\n"
|
|
||||||
|
|
||||||
sources = set() # To store unique sources
|
|
||||||
|
|
||||||
# Collect unique sources
|
|
||||||
for document in docs:
|
|
||||||
if "source" in document.metadata:
|
|
||||||
sources.add(document.metadata["source"])
|
|
||||||
|
|
||||||
for doc in docs:
|
|
||||||
# drop newlines from page_content
|
|
||||||
content = doc.page_content.replace("\n", " ")
|
|
||||||
content = " ".join(content.split())
|
|
||||||
text_res+="- "+content+"\n"
|
|
||||||
|
|
||||||
# Print the relevant sources used for the answer
|
|
||||||
for source in sources:
|
|
||||||
if source.startswith("http"):
|
|
||||||
text_res += "" + source + "\n"
|
|
||||||
|
|
||||||
chroma_client = None
|
|
||||||
#if args.postprocess:
|
|
||||||
# return post_process(text_res)
|
|
||||||
return text_res
|
|
||||||
#return localagi.post_process(text_res)
|
|
||||||
|
|
||||||
# write file to disk with content
|
|
||||||
def save_file(arg, agent_actions={}, localagi=None):
|
|
||||||
arg = json.loads(arg)
|
|
||||||
file = filename = arg["filename"]
|
|
||||||
content = arg["content"]
|
|
||||||
# create persistent dir if does not exist
|
|
||||||
if not os.path.exists(PERSISTENT_DIR):
|
|
||||||
os.makedirs(PERSISTENT_DIR)
|
|
||||||
# write the file in the directory specified
|
|
||||||
file = os.path.join(PERSISTENT_DIR, filename)
|
|
||||||
|
|
||||||
# Check if the file already exists
|
|
||||||
if os.path.exists(file):
|
|
||||||
mode = 'a' # Append mode
|
|
||||||
else:
|
|
||||||
mode = 'w' # Write mode
|
|
||||||
|
|
||||||
with open(file, mode) as f:
|
|
||||||
f.write(content)
|
|
||||||
|
|
||||||
file = discord.File(file, filename=filename)
|
|
||||||
call(channel.send(file=file, content=f"Here is what I have generated"))
|
|
||||||
return f"File {file} saved successfully."
|
|
||||||
|
|
||||||
def ddg(query: str, num_results: int, backend: str = "api") -> List[Dict[str, str]]:
|
|
||||||
"""Run query through DuckDuckGo and return metadata.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
query: The query to search for.
|
|
||||||
num_results: The number of results to return.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A list of dictionaries with the following keys:
|
|
||||||
snippet - The description of the result.
|
|
||||||
title - The title of the result.
|
|
||||||
link - The link to the result.
|
|
||||||
"""
|
|
||||||
ddgs = DDGS()
|
|
||||||
try:
|
|
||||||
results = ddgs.text(
|
|
||||||
query,
|
|
||||||
backend=backend,
|
|
||||||
)
|
|
||||||
if results is None:
|
|
||||||
return [{"Result": "No good DuckDuckGo Search Result was found"}]
|
|
||||||
|
|
||||||
def to_metadata(result: Dict) -> Dict[str, str]:
|
|
||||||
if backend == "news":
|
|
||||||
return {
|
|
||||||
"date": result["date"],
|
|
||||||
"title": result["title"],
|
|
||||||
"snippet": result["body"],
|
|
||||||
"source": result["source"],
|
|
||||||
"link": result["url"],
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
"snippet": result["body"],
|
|
||||||
"title": result["title"],
|
|
||||||
"link": result["href"],
|
|
||||||
}
|
|
||||||
|
|
||||||
formatted_results = []
|
|
||||||
for i, res in enumerate(results, 1):
|
|
||||||
if res is not None:
|
|
||||||
formatted_results.append(to_metadata(res))
|
|
||||||
if len(formatted_results) == num_results:
|
|
||||||
break
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
return []
|
|
||||||
return formatted_results
|
|
||||||
|
|
||||||
## Search on duckduckgo
|
|
||||||
def search_duckduckgo(a, agent_actions={}, localagi=None):
|
|
||||||
a = json.loads(a)
|
|
||||||
list=ddg(a["query"], 2)
|
|
||||||
|
|
||||||
text_res=""
|
|
||||||
for doc in list:
|
|
||||||
text_res+=f"""{doc["link"]}: {doc["title"]} {doc["snippet"]}\n"""
|
|
||||||
print("Found")
|
|
||||||
print(text_res)
|
|
||||||
#if args.postprocess:
|
|
||||||
# return post_process(text_res)
|
|
||||||
return text_res
|
|
||||||
#l = json.dumps(list)
|
|
||||||
#return l
|
|
||||||
|
|
||||||
### End Agent capabilities
|
|
||||||
###
|
|
||||||
|
|
||||||
### Agent action definitions
|
|
||||||
agent_actions = {
|
|
||||||
# "generate_picture": {
|
|
||||||
# "function": create_image,
|
|
||||||
# "plannable": True,
|
|
||||||
# "description": 'For creating a picture, the assistant replies with "generate_picture" and a detailed description, enhancing it with as much detail as possible.',
|
|
||||||
# "signature": {
|
|
||||||
# "name": "generate_picture",
|
|
||||||
# "parameters": {
|
|
||||||
# "type": "object",
|
|
||||||
# "properties": {
|
|
||||||
# "description": {
|
|
||||||
# "type": "string",
|
|
||||||
# },
|
|
||||||
# "width": {
|
|
||||||
# "type": "number",
|
|
||||||
# },
|
|
||||||
# "height": {
|
|
||||||
# "type": "number",
|
|
||||||
# },
|
|
||||||
# },
|
|
||||||
# }
|
|
||||||
# },
|
|
||||||
# },
|
|
||||||
"search_internet": {
|
|
||||||
"function": search_duckduckgo,
|
|
||||||
"plannable": True,
|
|
||||||
"description": 'For searching the internet with a query, the assistant replies with the action "search_internet" and the query to search.',
|
|
||||||
"signature": {
|
|
||||||
"name": "search_internet",
|
|
||||||
"description": """For searching internet.""",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"query": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "information to save"
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"save_file": {
|
|
||||||
"function": save_file,
|
|
||||||
"plannable": True,
|
|
||||||
"description": 'The assistant replies with the action "save_file", the filename and content to save for writing a file to disk permanently. This can be used to store the result of complex actions locally.',
|
|
||||||
"signature": {
|
|
||||||
"name": "save_file",
|
|
||||||
"description": """For saving a file to disk with content.""",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"filename": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "information to save"
|
|
||||||
},
|
|
||||||
"content": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "information to save"
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"ingest": {
|
|
||||||
"function": ingest,
|
|
||||||
"plannable": True,
|
|
||||||
"description": 'The assistant replies with the action "ingest" when there is an url to a sitemap to ingest memories from.',
|
|
||||||
"signature": {
|
|
||||||
"name": "ingest",
|
|
||||||
"description": """Save or store informations into memory.""",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"url": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "information to save"
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["url"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"save_memory": {
|
|
||||||
"function": save,
|
|
||||||
"plannable": True,
|
|
||||||
"description": 'The assistant replies with the action "save_memory" and the string to remember or store an information that thinks it is relevant permanently.',
|
|
||||||
"signature": {
|
|
||||||
"name": "save_memory",
|
|
||||||
"description": """Save or store informations into memory.""",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"content": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "information to save"
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["content"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"search_memory": {
|
|
||||||
"function": search_memory,
|
|
||||||
"plannable": True,
|
|
||||||
"description": 'The assistant replies with the action "search_memory" for searching between its memories with a query term.',
|
|
||||||
"signature": {
|
|
||||||
"name": "search_memory",
|
|
||||||
"description": """Search in memory""",
|
|
||||||
"parameters": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"keywords": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "reasoning behind the intent"
|
|
||||||
},
|
|
||||||
},
|
|
||||||
"required": ["keywords"]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
|
||||||


def localagi(q):
    agi = LocalAGI(
        agent_actions=agent_actions,
        llm_model=LLM_MODEL,
        tts_model=VOICE_MODEL,
        tts_api_base=TTS_API_BASE,
        functions_model=FUNCTIONS_MODEL,
        api_base=LOCALAI_API_BASE,
        stablediffusion_api_base=IMAGE_API_BASE,
        stablediffusion_model=STABLEDIFFUSION_MODEL,
    )
    conversation_history = []

    conversation_history = agi.evaluate(
        q,
        conversation_history,
        critic=False,
        re_evaluate=False,
        # Enable to lower context usage but increases LLM calls
        postprocess=False,
        subtaskContext=True,
    )
    return conversation_history[-1]["content"]
@@ -1,403 +0,0 @@
import logging
import re
import time

from openai.error import Timeout
from slack_bolt import App, Ack, BoltContext, BoltResponse
from slack_bolt.request.payload_utils import is_event
from slack_sdk.web import WebClient

from app.env import (
    OPENAI_TIMEOUT_SECONDS,
    SYSTEM_TEXT,
    TRANSLATE_MARKDOWN,
)

from app.i18n import translate
from app.openai_ops import (
    ask_llm,
    format_openai_message_content,
    build_system_text,
)
from app.slack_ops import find_parent_message, is_no_mention_thread, post_wip_message, update_wip_message


#
# Listener functions
#

def just_ack(ack: Ack):
    ack()


TIMEOUT_ERROR_MESSAGE = (
    f":warning: Sorry! It looks like OpenAI didn't respond within {OPENAI_TIMEOUT_SECONDS} seconds. "
    "Please try again later. :bow:"
)
DEFAULT_LOADING_TEXT = ":hourglass_flowing_sand: Wait a second, please ..."

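# Handles app_mention events: rebuilds the conversation from the thread (or
# from the mention itself), posts a work-in-progress placeholder, then swaps
# the LLM reply into that placeholder.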
def respond_to_app_mention(
    context: BoltContext,
    payload: dict,
    client: WebClient,
    logger: logging.Logger,
):
    if payload.get("thread_ts") is not None:
        parent_message = find_parent_message(
            client, context.channel_id, payload.get("thread_ts")
        )
        if parent_message is not None:
            if is_no_mention_thread(context, parent_message):
                # The message event handler will reply to this
                return

    wip_reply = None
    # Replace placeholder for Slack user ID in the system prompt
    system_text = build_system_text(SYSTEM_TEXT, TRANSLATE_MARKDOWN, context)
    messages = [{"role": "system", "content": system_text}]

    print("system text:" + system_text, flush=True)

    openai_api_key = context.get("OPENAI_API_KEY")
    try:
        if openai_api_key is None:
            client.chat_postMessage(
                channel=context.channel_id,
                text="To use this app, please configure your OpenAI API key first",
            )
            return

        user_id = context.actor_user_id or context.user_id
        content = ""
        if payload.get("thread_ts") is not None:
            # Mentioning the bot user in a thread
            replies_in_thread = client.conversations_replies(
                channel=context.channel_id,
                ts=payload.get("thread_ts"),
                include_all_metadata=True,
                limit=1000,
            ).get("messages", [])
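            # Note: `content` accumulates across replies, so each appended
            # message carries the full thread text seen up to that reply.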
            for reply in replies_in_thread:
                c = reply["text"] + "\n\n"
                content += c
                role = "assistant" if reply["user"] == context.bot_user_id else "user"
                messages.append(
                    {
                        "role": role,
                        "content": (
                            format_openai_message_content(
                                content, TRANSLATE_MARKDOWN
                            )
                        ),
                    }
                )
        else:
            # Strip bot Slack user ID from initial message
            msg_text = re.sub(f"<@{context.bot_user_id}>\\s*", "", payload["text"])
            messages.append(
                {
                    "role": "user",
                    "content": format_openai_message_content(msg_text, TRANSLATE_MARKDOWN),
                }
            )

        loading_text = translate(
            openai_api_key=openai_api_key, context=context, text=DEFAULT_LOADING_TEXT
        )
        wip_reply = post_wip_message(
            client=client,
            channel=context.channel_id,
            thread_ts=payload["ts"],
            loading_text=loading_text,
            messages=messages,
            user=context.user_id,
        )

        resp = ask_llm(messages=messages)
        print("Reply " + resp)

        update_wip_message(
            client=client,
            channel=context.channel_id,
            ts=wip_reply["message"]["ts"],
            text=resp,
            messages=messages,
            user=user_id,
        )

    except Timeout:
        if wip_reply is not None:
            text = (
                (
                    wip_reply.get("message", {}).get("text", "")
                    if wip_reply is not None
                    else ""
                )
                + "\n\n"
                + translate(
                    openai_api_key=openai_api_key,
                    context=context,
                    text=TIMEOUT_ERROR_MESSAGE,
                )
            )
            client.chat_update(
                channel=context.channel_id,
                ts=wip_reply["message"]["ts"],
                text=text,
            )
    except Exception as e:
        text = (
            (
                wip_reply.get("message", {}).get("text", "")
                if wip_reply is not None
                else ""
            )
            + "\n\n"
            + translate(
                openai_api_key=openai_api_key,
                context=context,
                text=f":warning: Failed to start a conversation with ChatGPT: {e}",
            )
        )
        logger.exception(text)
        if wip_reply is not None:
            client.chat_update(
                channel=context.channel_id,
                ts=wip_reply["message"]["ts"],
                text=text,
            )


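# Handles plain message events: pulls context from the DM history or the
# thread, restores the message list stashed in Slack message metadata, and
# only replies in DMs or in threads already established as no-mention threads.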
def respond_to_new_message(
    context: BoltContext,
    payload: dict,
    client: WebClient,
    logger: logging.Logger,
):
    if payload.get("bot_id") is not None and payload.get("bot_id") != context.bot_id:
        # Skip a new message by a different app
        return

    wip_reply = None
    try:
        is_in_dm_with_bot = payload.get("channel_type") == "im"
        is_no_mention_required = False
        thread_ts = payload.get("thread_ts")
        if is_in_dm_with_bot is False and thread_ts is None:
            return

        openai_api_key = context.get("OPENAI_API_KEY")
        if openai_api_key is None:
            return

        messages_in_context = []
        if is_in_dm_with_bot is True and thread_ts is None:
            # In the DM with the bot
            past_messages = client.conversations_history(
                channel=context.channel_id,
                include_all_metadata=True,
                limit=100,
            ).get("messages", [])
            past_messages.reverse()
            # Remove old messages
            for message in past_messages:
                seconds = time.time() - float(message.get("ts"))
                if seconds < 86400:  # less than 1 day
                    messages_in_context.append(message)
            is_no_mention_required = True
        else:
            # In a thread with the bot in a channel
            messages_in_context = client.conversations_replies(
                channel=context.channel_id,
                ts=thread_ts,
                include_all_metadata=True,
                limit=1000,
            ).get("messages", [])
            if is_in_dm_with_bot is True:
                is_no_mention_required = True
            else:
                the_parent_message_found = False
                for message in messages_in_context:
                    if message.get("ts") == thread_ts:
                        the_parent_message_found = True
                        is_no_mention_required = is_no_mention_thread(context, message)
                        break
                if the_parent_message_found is False:
                    parent_message = find_parent_message(
                        client, context.channel_id, thread_ts
                    )
                    if parent_message is not None:
                        is_no_mention_required = is_no_mention_thread(
                            context, parent_message
                        )

        messages = []
        user_id = context.actor_user_id or context.user_id
        last_assistant_idx = -1
        indices_to_remove = []
        for idx, reply in enumerate(messages_in_context):
            maybe_event_type = reply.get("metadata", {}).get("event_type")
            if maybe_event_type == "chat-gpt-convo":
                if context.bot_id != reply.get("bot_id"):
                    # Remove messages by a different app
                    indices_to_remove.append(idx)
                    continue
                maybe_new_messages = (
                    reply.get("metadata", {}).get("event_payload", {}).get("messages")
                )
                if maybe_new_messages is not None:
                    if len(messages) == 0 or user_id is None:
                        new_user_id = (
                            reply.get("metadata", {})
                            .get("event_payload", {})
                            .get("user")
                        )
                        if new_user_id is not None:
                            user_id = new_user_id
                    messages = maybe_new_messages
                    last_assistant_idx = idx

        if is_no_mention_required is False:
            return
        if is_in_dm_with_bot is False and last_assistant_idx == -1:
            return

        if is_in_dm_with_bot is True:
            # To know whether this app needs to start a new convo
            if not next(filter(lambda msg: msg["role"] == "system", messages), None):
                # Replace placeholder for Slack user ID in the system prompt
                system_text = build_system_text(
                    SYSTEM_TEXT, TRANSLATE_MARKDOWN, context
                )
                messages.insert(0, {"role": "system", "content": system_text})

        filtered_messages_in_context = []
        for idx, reply in enumerate(messages_in_context):
            # Strip bot Slack user ID from initial message
            if idx == 0:
                reply["text"] = re.sub(
                    f"<@{context.bot_user_id}>\\s*", "", reply["text"]
                )
            if idx not in indices_to_remove:
                filtered_messages_in_context.append(reply)
        if len(filtered_messages_in_context) == 0:
            return

        for reply in filtered_messages_in_context:
            messages.append(
                {
                    "content": format_openai_message_content(
                        reply.get("text"), TRANSLATE_MARKDOWN
                    ),
                    "role": "user",
                }
            )

        loading_text = translate(
            openai_api_key=openai_api_key, context=context, text=DEFAULT_LOADING_TEXT
        )
        wip_reply = post_wip_message(
            client=client,
            channel=context.channel_id,
            thread_ts=payload.get("thread_ts") if is_in_dm_with_bot else payload["ts"],
            loading_text=loading_text,
            messages=messages,
            user=user_id,
        )

        latest_replies = client.conversations_replies(
            channel=context.channel_id,
            ts=wip_reply.get("ts"),
            include_all_metadata=True,
            limit=1000,
        )
        if latest_replies.get("messages", [])[-1]["ts"] != wip_reply["message"]["ts"]:
            # Since a new reply will come soon, this app abandons this reply
            client.chat_delete(
                channel=context.channel_id,
                ts=wip_reply["message"]["ts"],
            )
            return

        resp = ask_llm(messages=messages)
        print("Reply " + resp)
        update_wip_message(
            client=client,
            channel=context.channel_id,
            ts=wip_reply["message"]["ts"],
            text=resp,
            messages=messages,
            user=user_id,
        )
    except Timeout:
        if wip_reply is not None:
            text = (
                (
                    wip_reply.get("message", {}).get("text", "")
                    if wip_reply is not None
                    else ""
                )
                + "\n\n"
                + translate(
                    openai_api_key=openai_api_key,
                    context=context,
                    text=TIMEOUT_ERROR_MESSAGE,
                )
            )
            client.chat_update(
                channel=context.channel_id,
                ts=wip_reply["message"]["ts"],
                text=text,
            )
    except Exception as e:
        text = (
            (
                wip_reply.get("message", {}).get("text", "")
                if wip_reply is not None
                else ""
            )
            + "\n\n"
            + f":warning: Failed to reply: {e}"
        )
        logger.exception(text)
        if wip_reply is not None:
            client.chat_update(
                channel=context.channel_id,
                ts=wip_reply["message"]["ts"],
                text=text,
            )


def register_listeners(app: App):
    app.event("app_mention")(ack=just_ack, lazy=[respond_to_app_mention])
    # app.event("message")(ack=just_ack, lazy=[respond_to_new_message])


MESSAGE_SUBTYPES_TO_SKIP = ["message_changed", "message_deleted"]


# To reduce unnecessary workload in this app,
# this before_authorize function skips message changed/deleted events.
# Especially, "message_changed" events can be triggered many times when the app rapidly updates its reply.
def before_authorize(
    body: dict,
    payload: dict,
    logger: logging.Logger,
    next_,
):
    if (
        is_event(body)
        and payload.get("type") == "message"
        and payload.get("subtype") in MESSAGE_SUBTYPES_TO_SKIP
    ):
        logger.debug(
            "Skipped the following middleware and listeners "
            f"for this message event (subtype: {payload.get('subtype')})"
        )
        return BoltResponse(status=200, body="")
    next_()
@@ -1,43 +0,0 @@
import os

DEFAULT_SYSTEM_TEXT = """
"""

SYSTEM_TEXT = os.environ.get("OPENAI_SYSTEM_TEXT", DEFAULT_SYSTEM_TEXT)

DEFAULT_OPENAI_TIMEOUT_SECONDS = 30
OPENAI_TIMEOUT_SECONDS = int(
    os.environ.get("OPENAI_TIMEOUT_SECONDS", DEFAULT_OPENAI_TIMEOUT_SECONDS)
)

DEFAULT_OPENAI_MODEL = "gpt-3.5-turbo"
OPENAI_MODEL = os.environ.get("OPENAI_MODEL", DEFAULT_OPENAI_MODEL)

USE_SLACK_LANGUAGE = os.environ.get("USE_SLACK_LANGUAGE", "true") == "true"

SLACK_APP_LOG_LEVEL = os.environ.get("SLACK_APP_LOG_LEVEL", "DEBUG")

TRANSLATE_MARKDOWN = os.environ.get("TRANSLATE_MARKDOWN", "false") == "true"

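# Every OpenAI-compatible endpoint below falls back to the same LocalAI base
# URL unless overridden individually.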
BASE_PATH = os.environ.get('OPENAI_API_BASE', 'http://localhost:8080/v1')

EMBEDDINGS_MODEL = os.environ.get('EMBEDDINGS_MODEL', "all-MiniLM-L6-v2")


EMBEDDINGS_API_BASE = os.environ.get("EMBEDDINGS_API_BASE", BASE_PATH)
LOCALAI_API_BASE = os.environ.get("LOCALAI_API_BASE", BASE_PATH)
TTS_API_BASE = os.environ.get("TTS_API_BASE", BASE_PATH)
IMAGE_API_BASE = os.environ.get("IMAGES_API_BASE", BASE_PATH)

STABLEDIFFUSION_MODEL = os.environ.get("STABLEDIFFUSION_MODEL", "dreamshaper")
FUNCTIONS_MODEL = os.environ.get("FUNCTIONS_MODEL", OPENAI_MODEL)
LLM_MODEL = os.environ.get("LLM_MODEL", OPENAI_MODEL)
VOICE_MODEL = os.environ.get("TTS_MODEL", "en-us-kathleen-low.onnx")
PERSISTENT_DIR = os.environ.get("PERSISTENT_DIR", "/data")
MILVUS_HOST = os.environ.get("MILVUS_HOST", "")
MILVUS_PORT = os.environ.get("MILVUS_PORT", 0)
MEMORY_COLLECTION = os.environ.get("MEMORY_COLLECTION", "local")
MEMORY_CHUNK_SIZE = os.environ.get("MEMORY_CHUNK_SIZE", 600)
MEMORY_CHUNK_OVERLAP = os.environ.get("MEMORY_CHUNK_OVERLAP", 110)
MEMORY_RESULTS = os.environ.get("MEMORY_RESULTS", 3)
MEMORY_SEARCH_TYPE = os.environ.get("MEMORY_SEARCH_TYPE", "mmr")
@@ -1,75 +0,0 @@
from typing import Optional

import openai
from slack_bolt import BoltContext

from .openai_ops import GPT_3_5_TURBO_0301_MODEL

# All the supported languages for Slack app as of March 2023
_locale_to_lang = {
    "en-US": "English",
    "en-GB": "English",
    "de-DE": "German",
    "es-ES": "Spanish",
    "es-LA": "Spanish",
    "fr-FR": "French",
    "it-IT": "Italian",
    "pt-BR": "Portuguese",
    "ru-RU": "Russian",
    "ja-JP": "Japanese",
    "zh-CN": "Chinese",
    "zh-TW": "Chinese",
    "ko-KR": "Korean",
}


def from_locale_to_lang(locale: Optional[str]) -> Optional[str]:
    if locale is None:
        return None
    return _locale_to_lang.get(locale)


_translation_result_cache = {}


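# Translations are memoized per (language, text) pair, so repeated UI strings
# cost only one ChatCompletion call per process.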
def translate(*, openai_api_key: str, context: BoltContext, text: str) -> str:
    lang = from_locale_to_lang(context.get("locale"))
    if lang is None or lang == "English":
        return text

    cached_result = _translation_result_cache.get(f"{lang}:{text}")
    if cached_result is not None:
        return cached_result
    response = openai.ChatCompletion.create(
        api_key=openai_api_key,
        model=GPT_3_5_TURBO_0301_MODEL,
        messages=[
            {
                "role": "system",
                "content": "You're the AI model that primarily focuses on the quality of language translation. "
                "You must not change the meaning of sentences when translating them into a different language. "
                "You must provide direct translation result as much as possible. "
                "When the given text is a single verb/noun, its translated text must be a noun/verb form too. "
                "Slack's emoji (e.g., :hourglass_flowing_sand:) and mention parts must be kept as-is. "
                "Your response must not include any additional notes in English. "
                "Your response must omit English version / pronunciation guide for the result. ",
            },
            {
                "role": "user",
                "content": f"Can you translate {text} into {lang} in a professional tone? "
                "Please respond with only the translated text in a format suitable for Slack user interface. "
                "No need to append any English notes and guides.",
            },
        ],
        top_p=1,
        n=1,
        max_tokens=1024,
        temperature=1,
        presence_penalty=0,
        frequency_penalty=0,
        logit_bias={},
        user="system",
    )
    translated_text = response["choices"][0]["message"].get("content")
    _translation_result_cache[f"{lang}:{text}"] = translated_text
    return translated_text
@@ -1,53 +0,0 @@
import re


# Conversion from Slack mrkdwn to OpenAI markdown
# See also: https://api.slack.com/reference/surfaces/formatting#basics
def slack_to_markdown(content: str) -> str:
    # Split the input string into parts based on code blocks and inline code
    parts = re.split(r"(```.+?```|`[^`\n]+?`)", content)

    # Apply the bold, italic, and strikethrough formatting to text not within code
    result = ""
    for part in parts:
        if part.startswith("```") or part.startswith("`"):
            result += part
        else:
            for o, n in [
                (r"\*(?!\s)([^\*\n]+?)(?<!\s)\*", r"**\1**"),  # *bold* to **bold**
                (r"_(?!\s)([^_\n]+?)(?<!\s)_", r"*\1*"),  # _italic_ to *italic*
                (r"~(?!\s)([^~\n]+?)(?<!\s)~", r"~~\1~~"),  # ~strike~ to ~~strike~~
            ]:
                part = re.sub(o, n, part)
            result += part
    return result
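# e.g. slack_to_markdown("*bold* and _italic_") -> "**bold** and *italic*"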


# Conversion from OpenAI markdown to Slack mrkdwn
# See also: https://api.slack.com/reference/surfaces/formatting#basics
def markdown_to_slack(content: str) -> str:
    # Split the input string into parts based on code blocks and inline code
    parts = re.split(r"(```.+?```|`[^`\n]+?`)", content)

    # Apply the bold, italic, and strikethrough formatting to text not within code
    result = ""
    for part in parts:
        if part.startswith("```") or part.startswith("`"):
            result += part
        else:
            for o, n in [
                (
                    r"\*\*\*(?!\s)([^\*\n]+?)(?<!\s)\*\*\*",
                    r"_*\1*_",
                ),  # ***bold italic*** to *_bold italic_*
                (
                    r"(?<![\*_])\*(?!\s)([^\*\n]+?)(?<!\s)\*(?![\*_])",
                    r"_\1_",
                ),  # *italic* to _italic_
                (r"\*\*(?!\s)([^\*\n]+?)(?<!\s)\*\*", r"*\1*"),  # **bold** to *bold*
                (r"__(?!\s)([^_\n]+?)(?<!\s)__", r"*\1*"),  # __bold__ to *bold*
                (r"~~(?!\s)([^~\n]+?)(?<!\s)~~", r"~\1~"),  # ~~strike~~ to ~strike~
            ]:
                part = re.sub(o, n, part)
            result += part
    return result
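# e.g. markdown_to_slack("**bold** and *italic*") -> "*bold* and _italic_"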
@@ -1,234 +0,0 @@
import threading
import time
import re
from typing import List, Dict, Any, Generator

import openai
from openai.error import Timeout
from openai.openai_object import OpenAIObject
import tiktoken

from slack_bolt import BoltContext
from slack_sdk.web import WebClient

from app.markdown import slack_to_markdown, markdown_to_slack
from app.slack_ops import update_wip_message

from app.agent import (
    localagi
)

# ----------------------------
# Internal functions
# ----------------------------

MAX_TOKENS = 1024
GPT_3_5_TURBO_0301_MODEL = "gpt-3.5-turbo-0301"


# Format message from Slack to send to OpenAI
def format_openai_message_content(content: str, translate_markdown: bool) -> str:
    if content is None:
        return None

    # Unescape &, < and >, since Slack replaces these with their HTML equivalents
    # See also: https://api.slack.com/reference/surfaces/formatting#escaping
    content = content.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&")

    # Convert from Slack mrkdwn to markdown format
    if translate_markdown:
        content = slack_to_markdown(content)

    return content


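# Despite the OpenAI-style signature, ask_llm flattens the chat history into a
# single prompt and hands it to the LocalAGI agent loop instead of calling the
# ChatCompletion API directly.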
def ask_llm(
    *,
    messages: List[Dict[str, str]],
) -> str:
    # Remove old messages to make sure we have room for max_tokens
    # See also: https://platform.openai.com/docs/guides/chat/introduction
    # > total tokens must be below the model’s maximum limit (4096 tokens for gpt-3.5-turbo-0301)
    # TODO: currently we don't pass gpt-4 to this calculation method
    while calculate_num_tokens(messages) >= 4096 - MAX_TOKENS:
        removed = False
        for i, message in enumerate(messages):
            if message["role"] in ("user", "assistant"):
                del messages[i]
                removed = True
                break
        if not removed:
            # Fall through and let the OpenAI error handler deal with it
            break

    prompt = ""

    for i, message in enumerate(messages):
        prompt += message["content"] + "\n"

    return localagi(prompt)


def consume_openai_stream_to_write_reply(
    *,
    client: WebClient,
    wip_reply: dict,
    context: BoltContext,
    user_id: str,
    messages: List[Dict[str, str]],
    stream: Generator[OpenAIObject, Any, None],
    timeout_seconds: int,
    translate_markdown: bool,
):
    start_time = time.time()
    assistant_reply: Dict[str, str] = {"role": "assistant", "content": ""}
    messages.append(assistant_reply)
    word_count = 0
    threads = []
    try:
        loading_character = " ... :writing_hand:"
        for chunk in stream:
            spent_seconds = time.time() - start_time
            if timeout_seconds < spent_seconds:
                raise Timeout()
            item = chunk.choices[0]
            if item.get("finish_reason") is not None:
                break
            delta = item.get("delta")
            if delta.get("content") is not None:
                word_count += 1
                assistant_reply["content"] += delta.get("content")
                if word_count >= 20:

                    def update_message():
                        assistant_reply_text = format_assistant_reply(
                            assistant_reply["content"], translate_markdown
                        )
                        wip_reply["message"]["text"] = assistant_reply_text
                        update_wip_message(
                            client=client,
                            channel=context.channel_id,
                            ts=wip_reply["message"]["ts"],
                            text=assistant_reply_text + loading_character,
                            messages=messages,
                            user=user_id,
                        )

                    thread = threading.Thread(target=update_message)
                    thread.daemon = True
                    thread.start()
                    threads.append(thread)
                    word_count = 0

        for t in threads:
            try:
                if t.is_alive():
                    t.join()
            except Exception:
                pass

        assistant_reply_text = format_assistant_reply(
            assistant_reply["content"], translate_markdown
        )
        wip_reply["message"]["text"] = assistant_reply_text
        update_wip_message(
            client=client,
            channel=context.channel_id,
            ts=wip_reply["message"]["ts"],
            text=assistant_reply_text,
            messages=messages,
            user=user_id,
        )
    finally:
        for t in threads:
            try:
                if t.is_alive():
                    t.join()
            except Exception:
                pass
        try:
            stream.close()
        except Exception:
            pass


def calculate_num_tokens(
    messages: List[Dict[str, str]],
    # TODO: adjustment for gpt-4
    model: str = GPT_3_5_TURBO_0301_MODEL,
) -> int:
    """Returns the number of tokens used by a list of messages."""
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        encoding = tiktoken.get_encoding("cl100k_base")
    if model == GPT_3_5_TURBO_0301_MODEL:
        # note: future models may deviate from this
        num_tokens = 0
        for message in messages:
            # every message follows <im_start>{role/name}\n{content}<im_end>\n
            num_tokens += 4
            for key, value in message.items():
                num_tokens += len(encoding.encode(value))
                if key == "name":  # if there's a name, the role is omitted
                    num_tokens += -1  # role is always required and always 1 token
        num_tokens += 2  # every reply is primed with <im_start>assistant
        return num_tokens
    else:
        error = (
            f"Calculating the number of tokens for model {model} is not yet supported. "
            "See https://github.com/openai/openai-python/blob/main/chatml.md "
            "for information on how messages are converted to tokens."
        )
        raise NotImplementedError(error)


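# e.g. {"role": "user", "content": "hi"} costs 4 + len(enc("user")) +
# len(enc("hi")) tokens, plus the 2 tokens priming the assistant reply.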
# Format message from OpenAI to display in Slack
def format_assistant_reply(content: str, translate_markdown: bool) -> str:
    for o, n in [
        # Remove leading newlines
        ("^\n+", ""),
        # Remove prepended Slack user ID
        ("^<@U.*?>\\s?:\\s?", ""),
        # Remove OpenAI syntax tags since Slack doesn't render them in a message
        ("```\\s*[Rr]ust\n", "```\n"),
        ("```\\s*[Rr]uby\n", "```\n"),
        ("```\\s*[Ss]cala\n", "```\n"),
        ("```\\s*[Kk]otlin\n", "```\n"),
        ("```\\s*[Jj]ava\n", "```\n"),
        ("```\\s*[Gg]o\n", "```\n"),
        ("```\\s*[Ss]wift\n", "```\n"),
        ("```\\s*[Oo]bjective[Cc]\n", "```\n"),
        ("```\\s*[Cc]\n", "```\n"),
        ("```\\s*[Cc][+][+]\n", "```\n"),
        ("```\\s*[Cc][Pp][Pp]\n", "```\n"),
        ("```\\s*[Cc]sharp\n", "```\n"),
        ("```\\s*[Mm]atlab\n", "```\n"),
        ("```\\s*[Jj][Ss][Oo][Nn]\n", "```\n"),
        ("```\\s*[Ll]a[Tt]e[Xx]\n", "```\n"),
        ("```\\s*bash\n", "```\n"),
        ("```\\s*zsh\n", "```\n"),
        ("```\\s*sh\n", "```\n"),
        ("```\\s*[Ss][Qq][Ll]\n", "```\n"),
        ("```\\s*[Pp][Hh][Pp]\n", "```\n"),
        ("```\\s*[Pp][Ee][Rr][Ll]\n", "```\n"),
        ("```\\s*[Jj]ava[Ss]cript\n", "```\n"),
        ("```\\s*[Tt]ype[Ss]cript\n", "```\n"),
        ("```\\s*[Pp]ython\n", "```\n"),
    ]:
        content = re.sub(o, n, content)

    # Convert from OpenAI markdown to Slack mrkdwn format
    if translate_markdown:
        content = markdown_to_slack(content)

    return content


def build_system_text(
    system_text_template: str, translate_markdown: bool, context: BoltContext
):
    system_text = system_text_template.format(bot_user_id=context.bot_user_id)
    # Translate format hint in system prompt
    if translate_markdown is True:
        system_text = slack_to_markdown(system_text)
    return system_text
@@ -1,110 +0,0 @@
from typing import Optional, List, Dict

from slack_sdk.web import WebClient, SlackResponse
from slack_bolt import BoltContext

# ----------------------------
# General operations in a channel
# ----------------------------


def find_parent_message(
    client: WebClient, channel_id: Optional[str], thread_ts: Optional[str]
) -> Optional[dict]:
    if channel_id is None or thread_ts is None:
        return None

    messages = client.conversations_history(
        channel=channel_id,
        latest=thread_ts,
        limit=1,
        inclusive=1,
    ).get("messages", [])

    return messages[0] if len(messages) > 0 else None


def is_no_mention_thread(context: BoltContext, parent_message: dict) -> bool:
    parent_message_text = parent_message.get("text", "")
    return f"<@{context.bot_user_id}>" in parent_message_text


# ----------------------------
# WIP reply message stuff
# ----------------------------

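# Both helpers below stash the system messages (and the requesting user) in
# Slack message metadata under event_type "chat-gpt-convo", so later events
# can rebuild the conversation seed from the channel itself.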
def post_wip_message(
    *,
    client: WebClient,
    channel: str,
    thread_ts: str,
    loading_text: str,
    messages: List[Dict[str, str]],
    user: str,
) -> SlackResponse:
    system_messages = [msg for msg in messages if msg["role"] == "system"]
    return client.chat_postMessage(
        channel=channel,
        thread_ts=thread_ts,
        text=loading_text,
        metadata={
            "event_type": "chat-gpt-convo",
            "event_payload": {"messages": system_messages, "user": user},
        },
    )


def update_wip_message(
    client: WebClient,
    channel: str,
    ts: str,
    text: str,
    messages: List[Dict[str, str]],
    user: str,
) -> SlackResponse:
    system_messages = [msg for msg in messages if msg["role"] == "system"]
    return client.chat_update(
        channel=channel,
        ts=ts,
        text=text,
        metadata={
            "event_type": "chat-gpt-convo",
            "event_payload": {"messages": system_messages, "user": user},
        },
    )


# ----------------------------
# Home tab
# ----------------------------

DEFAULT_HOME_TAB_MESSAGE = (
    "To enable this app in this Slack workspace, you need to save your OpenAI API key. "
    "Visit <https://platform.openai.com/account/api-keys|your developer page> to grab your key!"
)

DEFAULT_HOME_TAB_CONFIGURE_LABEL = "Configure"


def build_home_tab(message: str, configure_label: str) -> dict:
    return {
        "type": "home",
        "blocks": [
            {
                "type": "section",
                "text": {
                    "type": "mrkdwn",
                    "text": message,
                },
                "accessory": {
                    "action_id": "configure",
                    "type": "button",
                    "text": {"type": "plain_text", "text": configure_label},
                    "style": "primary",
                    "value": "api_key",
                },
            }
        ],
    }
@@ -1,12 +0,0 @@
#!/bin/bash

cd /app

pip uninstall hnswlib -y

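# Rebuild hnswlib from source (presumably so the native extension is compiled
# for this container's CPU/toolchain) before starting the bot.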
git clone https://github.com/nmslib/hnswlib.git
cd hnswlib
pip install .
cd ..

python main.py
@@ -1,69 +0,0 @@
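# Socket Mode entrypoint: reads the bot and app tokens from the environment
# and serves Slack events over a websocket instead of public HTTP endpoints.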
import logging
import os

from slack_bolt import App, BoltContext
from slack_sdk.web import WebClient
from slack_sdk.http_retry.builtin_handlers import RateLimitErrorRetryHandler

from app.bolt_listeners import before_authorize, register_listeners
from app.env import *
from app.slack_ops import (
    build_home_tab,
    DEFAULT_HOME_TAB_MESSAGE,
    DEFAULT_HOME_TAB_CONFIGURE_LABEL,
)
from app.i18n import translate

if __name__ == "__main__":
    from slack_bolt.adapter.socket_mode import SocketModeHandler

    logging.basicConfig(level=SLACK_APP_LOG_LEVEL)

    app = App(
        token=os.environ["SLACK_BOT_TOKEN"],
        before_authorize=before_authorize,
        process_before_response=True,
    )
    app.client.retry_handlers.append(RateLimitErrorRetryHandler(max_retry_count=2))

    register_listeners(app)

    @app.event("app_home_opened")
    def render_home_tab(client: WebClient, context: BoltContext):
        already_set_api_key = os.environ["OPENAI_API_KEY"]
        text = translate(
            openai_api_key=already_set_api_key,
            context=context,
            text=DEFAULT_HOME_TAB_MESSAGE,
        )
        configure_label = translate(
            openai_api_key=already_set_api_key,
            context=context,
            text=DEFAULT_HOME_TAB_CONFIGURE_LABEL,
        )
        client.views_publish(
            user_id=context.user_id,
            view=build_home_tab(text, configure_label),
        )

    if USE_SLACK_LANGUAGE is True:

        @app.middleware
        def set_locale(
            context: BoltContext,
            client: WebClient,
            next_,
        ):
            user_id = context.actor_user_id or context.user_id
            user_info = client.users_info(user=user_id, include_locale=True)
            context["locale"] = user_info.get("user", {}).get("locale")
            next_()

    @app.middleware
    def set_openai_api_key(context: BoltContext, next_):
        context["OPENAI_API_KEY"] = os.environ["OPENAI_API_KEY"]
        context["OPENAI_MODEL"] = OPENAI_MODEL
        next_()

    handler = SocketModeHandler(app, os.environ["SLACK_APP_TOKEN"])
    handler.start()
@@ -1,306 +0,0 @@
# Unzip the dependencies managed by serverless-python-requirements
try:
    import unzip_requirements  # type:ignore
except ImportError:
    pass

#
# Imports
#

import json
import logging
import os
import openai

from slack_sdk.web import WebClient
from slack_sdk.errors import SlackApiError
from slack_sdk.http_retry.builtin_handlers import RateLimitErrorRetryHandler
from slack_bolt import App, Ack, BoltContext

from app.bolt_listeners import register_listeners, before_authorize
from app.env import USE_SLACK_LANGUAGE, SLACK_APP_LOG_LEVEL, DEFAULT_OPENAI_MODEL
from app.slack_ops import (
    build_home_tab,
    DEFAULT_HOME_TAB_MESSAGE,
    DEFAULT_HOME_TAB_CONFIGURE_LABEL,
)
from app.i18n import translate

#
# Product deployment (AWS Lambda)
#
# export SLACK_CLIENT_ID=
# export SLACK_CLIENT_SECRET=
# export SLACK_SIGNING_SECRET=
# export SLACK_SCOPES=app_mentions:read,channels:history,groups:history,im:history,mpim:history,chat:write.public,chat:write,users:read
# export SLACK_INSTALLATION_S3_BUCKET_NAME=
# export SLACK_STATE_S3_BUCKET_NAME=
# export OPENAI_S3_BUCKET_NAME=
# npm install -g serverless
# serverless plugin install -n serverless-python-requirements
# serverless deploy
#

import boto3
from slack_bolt.adapter.aws_lambda import SlackRequestHandler
from slack_bolt.adapter.aws_lambda.lambda_s3_oauth_flow import LambdaS3OAuthFlow

SlackRequestHandler.clear_all_log_handlers()
logging.basicConfig(format="%(asctime)s %(message)s", level=SLACK_APP_LOG_LEVEL)

s3_client = boto3.client("s3")
openai_bucket_name = os.environ["OPENAI_S3_BUCKET_NAME"]

client_template = WebClient()
client_template.retry_handlers.append(RateLimitErrorRetryHandler(max_retry_count=2))


def register_revocation_handlers(app: App):
    # Handle uninstall events and token revocations
    @app.event("tokens_revoked")
    def handle_tokens_revoked_events(
        event: dict,
        context: BoltContext,
        logger: logging.Logger,
    ):
        user_ids = event.get("tokens", {}).get("oauth", [])
        if len(user_ids) > 0:
            for user_id in user_ids:
                app.installation_store.delete_installation(
                    enterprise_id=context.enterprise_id,
                    team_id=context.team_id,
                    user_id=user_id,
                )
        bots = event.get("tokens", {}).get("bot", [])
        if len(bots) > 0:
            app.installation_store.delete_bot(
                enterprise_id=context.enterprise_id,
                team_id=context.team_id,
            )
            try:
                s3_client.delete_object(Bucket=openai_bucket_name, Key=context.team_id)
            except Exception as e:
                logger.error(
                    f"Failed to delete an OpenAI auth key: (team_id: {context.team_id}, error: {e})"
                )

    @app.event("app_uninstalled")
    def handle_app_uninstalled_events(
        context: BoltContext,
        logger: logging.Logger,
    ):
        app.installation_store.delete_all(
            enterprise_id=context.enterprise_id,
            team_id=context.team_id,
        )
        try:
            s3_client.delete_object(Bucket=openai_bucket_name, Key=context.team_id)
        except Exception as e:
            logger.error(
                f"Failed to delete an OpenAI auth key: (team_id: {context.team_id}, error: {e})"
            )


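# Lambda entrypoint: a fresh Bolt App is assembled per invocation, with OAuth
# installation state and per-team OpenAI config both kept in S3.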
def handler(event, context_):
    app = App(
        process_before_response=True,
        before_authorize=before_authorize,
        oauth_flow=LambdaS3OAuthFlow(),
        client=client_template,
    )
    app.oauth_flow.settings.install_page_rendering_enabled = False
    register_listeners(app)
    register_revocation_handlers(app)

    if USE_SLACK_LANGUAGE is True:

        @app.middleware
        def set_locale(
            context: BoltContext,
            client: WebClient,
            logger: logging.Logger,
            next_,
        ):
            bot_scopes = context.authorize_result.bot_scopes
            if bot_scopes is not None and "users:read" in bot_scopes:
                user_id = context.actor_user_id or context.user_id
                try:
                    user_info = client.users_info(user=user_id, include_locale=True)
                    context["locale"] = user_info.get("user", {}).get("locale")
                except SlackApiError as e:
                    logger.debug(f"Failed to fetch user info due to {e}")
            next_()

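    # Per-team OpenAI config lives in S3 under the team_id key: either a bare
    # API key (the legacy format) or JSON {"api_key": ..., "model": ...}.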
    @app.middleware
    def set_s3_openai_api_key(context: BoltContext, next_):
        try:
            s3_response = s3_client.get_object(
                Bucket=openai_bucket_name, Key=context.team_id
            )
            config_str: str = s3_response["Body"].read().decode("utf-8")
            if config_str.startswith("{"):
                config = json.loads(config_str)
                context["OPENAI_API_KEY"] = config.get("api_key")
                context["OPENAI_MODEL"] = config.get("model")
            else:
                # The legacy data format
                context["OPENAI_API_KEY"] = config_str
                context["OPENAI_MODEL"] = DEFAULT_OPENAI_MODEL
        except:  # noqa: E722
            context["OPENAI_API_KEY"] = None
        next_()

@app.event("app_home_opened")
|
|
||||||
def render_home_tab(client: WebClient, context: BoltContext):
|
|
||||||
message = DEFAULT_HOME_TAB_MESSAGE
|
|
||||||
configure_label = DEFAULT_HOME_TAB_CONFIGURE_LABEL
|
|
||||||
try:
|
|
||||||
s3_client.get_object(Bucket=openai_bucket_name, Key=context.team_id)
|
|
||||||
message = "This app is ready to use in this workspace :raised_hands:"
|
|
||||||
except: # noqa: E722
|
|
||||||
pass
|
|
||||||
|
|
||||||
openai_api_key = context.get("OPENAI_API_KEY")
|
|
||||||
if openai_api_key is not None:
|
|
||||||
message = translate(
|
|
||||||
openai_api_key=openai_api_key, context=context, text=message
|
|
||||||
)
|
|
||||||
configure_label = translate(
|
|
||||||
openai_api_key=openai_api_key,
|
|
||||||
context=context,
|
|
||||||
text=DEFAULT_HOME_TAB_CONFIGURE_LABEL,
|
|
||||||
)
|
|
||||||
|
|
||||||
client.views_publish(
|
|
||||||
user_id=context.user_id,
|
|
||||||
view=build_home_tab(message, configure_label),
|
|
||||||
)
|
|
||||||
|
|
||||||
@app.action("configure")
|
|
||||||
def handle_some_action(ack, body: dict, client: WebClient, context: BoltContext):
|
|
||||||
ack()
|
|
||||||
already_set_api_key = context.get("OPENAI_API_KEY")
|
|
||||||
api_key_text = "Save your OpenAI API key:"
|
|
||||||
submit = "Submit"
|
|
||||||
cancel = "Cancel"
|
|
||||||
if already_set_api_key is not None:
|
|
||||||
api_key_text = translate(
|
|
||||||
openai_api_key=already_set_api_key, context=context, text=api_key_text
|
|
||||||
)
|
|
||||||
submit = translate(
|
|
||||||
openai_api_key=already_set_api_key, context=context, text=submit
|
|
||||||
)
|
|
||||||
cancel = translate(
|
|
||||||
openai_api_key=already_set_api_key, context=context, text=cancel
|
|
||||||
)
|
|
||||||
|
|
||||||
client.views_open(
|
|
||||||
trigger_id=body["trigger_id"],
|
|
||||||
view={
|
|
||||||
"type": "modal",
|
|
||||||
"callback_id": "configure",
|
|
||||||
"title": {"type": "plain_text", "text": "OpenAI API Key"},
|
|
||||||
"submit": {"type": "plain_text", "text": submit},
|
|
||||||
"close": {"type": "plain_text", "text": cancel},
|
|
||||||
"blocks": [
|
|
||||||
{
|
|
||||||
"type": "input",
|
|
||||||
"block_id": "api_key",
|
|
||||||
"label": {"type": "plain_text", "text": api_key_text},
|
|
||||||
"element": {"type": "plain_text_input", "action_id": "input"},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "input",
|
|
||||||
"block_id": "model",
|
|
||||||
"label": {"type": "plain_text", "text": "OpenAI Model"},
|
|
||||||
"element": {
|
|
||||||
"type": "static_select",
|
|
||||||
"action_id": "input",
|
|
||||||
"options": [
|
|
||||||
{
|
|
||||||
"text": {
|
|
||||||
"type": "plain_text",
|
|
||||||
"text": "GPT-3.5 Turbo",
|
|
||||||
},
|
|
||||||
"value": "gpt-3.5-turbo",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"text": {"type": "plain_text", "text": "GPT-4"},
|
|
||||||
"value": "gpt-4",
|
|
||||||
},
|
|
||||||
],
|
|
||||||
"initial_option": {
|
|
||||||
"text": {
|
|
||||||
"type": "plain_text",
|
|
||||||
"text": "GPT-3.5 Turbo",
|
|
||||||
},
|
|
||||||
"value": "gpt-3.5-turbo",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
    def validate_api_key_registration(ack: Ack, view: dict, context: BoltContext):
        already_set_api_key = context.get("OPENAI_API_KEY")

        inputs = view["state"]["values"]
        api_key = inputs["api_key"]["input"]["value"]
        model = inputs["model"]["input"]["selected_option"]["value"]
        try:
            # Verify if the API key is valid
            openai.Model.retrieve(api_key=api_key, id="gpt-3.5-turbo")
            try:
                # Verify if the given model works with the API key
                openai.Model.retrieve(api_key=api_key, id=model)
            except Exception:
                text = "This model is not yet available for this API key"
                if already_set_api_key is not None:
                    text = translate(
                        openai_api_key=already_set_api_key, context=context, text=text
                    )
                ack(
                    response_action="errors",
                    errors={"model": text},
                )
                return
            ack()
        except Exception:
            text = "This API key seems to be invalid"
            if already_set_api_key is not None:
                text = translate(
                    openai_api_key=already_set_api_key, context=context, text=text
                )
            ack(
                response_action="errors",
                errors={"api_key": text},
            )

    def save_api_key_registration(
        view: dict,
        logger: logging.Logger,
        context: BoltContext,
    ):
        inputs = view["state"]["values"]
        api_key = inputs["api_key"]["input"]["value"]
        model = inputs["model"]["input"]["selected_option"]["value"]
        try:
            openai.Model.retrieve(api_key=api_key, id=model)
            s3_client.put_object(
                Bucket=openai_bucket_name,
                Key=context.team_id,
                Body=json.dumps({"api_key": api_key, "model": model}),
            )
        except Exception as e:
            logger.exception(e)

    app.view("configure")(
        ack=validate_api_key_registration,
        lazy=[save_api_key_registration],
    )

    slack_handler = SlackRequestHandler(app=app)
    return slack_handler.handle(event, context_)
@@ -1,32 +0,0 @@
display_information:
  name: ChatGPT (dev)
features:
  app_home:
    home_tab_enabled: false
    messages_tab_enabled: true
    messages_tab_read_only_enabled: false
  bot_user:
    display_name: ChatGPT Bot (dev)
    always_online: true
oauth_config:
  scopes:
    bot:
      - app_mentions:read
      - channels:history
      - groups:history
      - im:history
      - mpim:history
      - chat:write.public
      - chat:write
      - users:read
settings:
  event_subscriptions:
    bot_events:
      - app_mention
      - message.channels
      - message.groups
      - message.im
      - message.mpim
  interactivity:
    is_enabled: true
  socket_mode_enabled: true
@@ -1,43 +0,0 @@
display_information:
  name: ChatGPT
  description: Interact with ChatGPT in Slack!
  background_color: "#195208"
features:
  app_home:
    home_tab_enabled: true
    messages_tab_enabled: true
    messages_tab_read_only_enabled: false
  bot_user:
    display_name: ChatGPT Bot
    always_online: true
oauth_config:
  redirect_urls:
    - https://TODO.amazonaws.com/slack/oauth_redirect
  scopes:
    bot:
      - app_mentions:read
      - channels:history
      - groups:history
      - im:history
      - mpim:history
      - chat:write.public
      - chat:write
      - users:read
settings:
  event_subscriptions:
    request_url: https://TODO.amazonaws.com/slack/events
    bot_events:
      - app_home_opened
      - app_mention
      - app_uninstalled
      - message.channels
      - message.groups
      - message.im
      - message.mpim
      - tokens_revoked
  interactivity:
    is_enabled: true
    request_url: https://TODO.amazonaws.com/slack/events
  org_deploy_enabled: false
  socket_mode_enabled: false
  token_rotation_enabled: false
@@ -1,15 +0,0 @@
slack-bolt>=1.18.0,<2
lxml==4.9.3
bs4==0.0.1
openai>=0.27.4,<0.28
tiktoken>=0.3.3,<0.4
chromadb==0.3.23
langchain==0.0.242
GitPython==3.1.31
InstructorEmbedding
loguru
git+https://github.com/mudler/LocalAGI
pysqlite3-binary
requests
ascii-magic
duckduckgo_search==4.1.1
@@ -1,2 +0,0 @@
docker build -t slack-bot .
docker run -v $PWD/data:/data --rm -ti --env-file .dockerenv slack-bot
97
go.mod
Normal file
@@ -0,0 +1,97 @@
module github.com/mudler/LocalAGI

go 1.22.0

toolchain go1.22.2

require (
	github.com/bwmarrin/discordgo v0.28.1
	github.com/chasefleming/elem-go v0.25.0
	github.com/dave-gray101/v2keyauth v0.0.0-20240624150259-c45d584d25e2
	github.com/donseba/go-htmx v1.8.0
	github.com/eritikass/githubmarkdownconvertergo v0.1.10
	github.com/go-telegram/bot v1.2.1
	github.com/gofiber/fiber/v2 v2.52.4
	github.com/gofiber/template/html/v2 v2.1.1
	github.com/google/go-github/v69 v69.2.0
	github.com/google/uuid v1.6.0
	github.com/metoro-io/mcp-golang v0.8.0
	github.com/onsi/ginkgo/v2 v2.15.0
	github.com/onsi/gomega v1.31.1
	github.com/philippgille/chromem-go v0.5.0
	github.com/sashabaranov/go-openai v1.18.3
	github.com/slack-go/slack v0.16.0
	github.com/thoj/go-ircevent v0.0.0-20210723090443-73e444401d64
	github.com/tmc/langchaingo v0.1.8
	github.com/traefik/yaegi v0.16.1
	github.com/valyala/fasthttp v1.52.0
	golang.org/x/crypto v0.30.0
	jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056
	mvdan.cc/xurls/v2 v2.6.0
)

require (
	github.com/PuerkitoBio/goquery v1.8.1 // indirect
	github.com/andybalholm/brotli v1.1.0 // indirect
	github.com/andybalholm/cascadia v1.3.2 // indirect
	github.com/antchfx/htmlquery v1.3.0 // indirect
	github.com/antchfx/xmlquery v1.3.17 // indirect
	github.com/antchfx/xpath v1.2.4 // indirect
	github.com/bahlo/generic-list-go v0.2.0 // indirect
	github.com/buger/jsonparser v1.1.1 // indirect
	github.com/dlclark/regexp2 v1.10.0 // indirect
	github.com/gin-contrib/sse v0.1.0 // indirect
	github.com/gin-gonic/gin v1.8.1 // indirect
	github.com/go-logr/logr v1.3.0 // indirect
	github.com/go-playground/locales v0.14.0 // indirect
	github.com/go-playground/universal-translator v0.18.0 // indirect
	github.com/go-playground/validator/v10 v10.10.0 // indirect
	github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect
	github.com/gobwas/glob v0.2.3 // indirect
	github.com/goccy/go-json v0.9.7 // indirect
	github.com/gocolly/colly v1.2.0 // indirect
	github.com/gofiber/template v1.8.3 // indirect
	github.com/gofiber/utils v1.1.0 // indirect
	github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
	github.com/golang/protobuf v1.5.3 // indirect
	github.com/google/go-cmp v0.6.0 // indirect
	github.com/google/go-querystring v1.1.0 // indirect
	github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 // indirect
	github.com/gorilla/websocket v1.5.3 // indirect
	github.com/invopop/jsonschema v0.12.0 // indirect
	github.com/json-iterator/go v1.1.12 // indirect
	github.com/kennygrant/sanitize v1.2.4 // indirect
	github.com/klauspost/compress v1.17.7 // indirect
	github.com/leodido/go-urn v1.2.1 // indirect
	github.com/mailru/easyjson v0.7.7 // indirect
	github.com/mattn/go-colorable v0.1.13 // indirect
	github.com/mattn/go-isatty v0.0.20 // indirect
	github.com/mattn/go-runewidth v0.0.15 // indirect
	github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
	github.com/modern-go/reflect2 v1.0.2 // indirect
	github.com/olekukonko/tablewriter v0.0.5 // indirect
	github.com/pelletier/go-toml/v2 v2.0.9 // indirect
	github.com/pkg/errors v0.9.1 // indirect
	github.com/pkoukk/tiktoken-go v0.1.6 // indirect
	github.com/rivo/uniseg v0.2.0 // indirect
	github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d // indirect
	github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf // indirect
	github.com/temoto/robotstxt v1.1.2 // indirect
	github.com/tidwall/gjson v1.18.0 // indirect
	github.com/tidwall/match v1.1.1 // indirect
	github.com/tidwall/pretty v1.2.1 // indirect
	github.com/tidwall/sjson v1.2.5 // indirect
	github.com/ugorji/go/codec v1.2.7 // indirect
	github.com/valyala/bytebufferpool v1.0.0 // indirect
	github.com/valyala/tcplisten v1.0.0 // indirect
	github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect
	go.starlark.net v0.0.0-20230302034142-4b1e35fe2254 // indirect
	golang.org/x/net v0.32.0 // indirect
	golang.org/x/sys v0.28.0 // indirect
	golang.org/x/text v0.21.0 // indirect
	golang.org/x/tools v0.28.0 // indirect
	google.golang.org/appengine v1.6.8 // indirect
	google.golang.org/protobuf v1.32.0 // indirect
	gopkg.in/yaml.v2 v2.4.0 // indirect
	gopkg.in/yaml.v3 v3.0.1 // indirect
)
350
go.sum
Normal file
350
go.sum
Normal file
@@ -0,0 +1,350 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/PuerkitoBio/goquery v1.8.1 h1:uQxhNlArOIdbrH1tr0UXwdVFgDcZDrZVdcpygAcwmWM=
github.com/PuerkitoBio/goquery v1.8.1/go.mod h1:Q8ICL1kNUJ2sXGoAhPGUdYDJvgQgHzJsnnd3H7Ho5jQ=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
github.com/andybalholm/cascadia v1.3.1/go.mod h1:R4bJ1UQfqADjvDa4P6HZHLh/3OxWWEqc0Sk8XGwHqvA=
github.com/andybalholm/cascadia v1.3.2 h1:3Xi6Dw5lHF15JtdcmAHD3i1+T8plmv7BQ/nsViSLyss=
github.com/andybalholm/cascadia v1.3.2/go.mod h1:7gtRlve5FxPPgIgX36uWBX58OdBsSS6lUvCFb+h7KvU=
github.com/antchfx/htmlquery v1.3.0 h1:5I5yNFOVI+egyia5F2s/5Do2nFWxJz41Tr3DyfKD25E=
github.com/antchfx/htmlquery v1.3.0/go.mod h1:zKPDVTMhfOmcwxheXUsx4rKJy8KEY/PU6eXr/2SebQ8=
github.com/antchfx/xmlquery v1.3.17 h1:d0qWjPp/D+vtRw7ivCwT5ApH/3CkQU8JOeo3245PpTk=
github.com/antchfx/xmlquery v1.3.17/go.mod h1:Afkq4JIeXut75taLSuI31ISJ/zeq+3jG7TunF7noreA=
github.com/antchfx/xpath v1.2.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/antchfx/xpath v1.2.4 h1:dW1HB/JxKvGtJ9WyVGJ0sIoEcqftV3SqIstujI+B9XY=
github.com/antchfx/xpath v1.2.4/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs=
github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0=
github.com/bwmarrin/discordgo v0.28.1 h1:gXsuo2GBO7NbR6uqmrrBDplPUx2T3nzu775q/Rd1aG4=
github.com/bwmarrin/discordgo v0.28.1/go.mod h1:NJZpH+1AfhIcyQsPeuBKsUtYrRnjkyu0kIVMCHkZtRY=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/chasefleming/elem-go v0.25.0 h1:LYzr1auk39Bh3bdKloArOFV7sOBnOfSOKxsg58eWL0Q=
github.com/chasefleming/elem-go v0.25.0/go.mod h1:hz73qILBIKnTgOujnSMtEj20/epI+f6vg71RUilJAA4=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/dave-gray101/v2keyauth v0.0.0-20240624150259-c45d584d25e2 h1:flLYmnQFZNo04x2NPehMbf30m7Pli57xwZ0NFqR/hb0=
github.com/dave-gray101/v2keyauth v0.0.0-20240624150259-c45d584d25e2/go.mod h1:NtWqRzAp/1tw+twkW8uuBenEVVYndEAZACWU3F3xdoQ=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq0=
github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
github.com/donseba/go-htmx v1.8.0 h1:oTx1uUsjXZZVvcZfulZvBSPtdD1jzsvZyuK91+Q8zPE=
github.com/donseba/go-htmx v1.8.0/go.mod h1:8PTAYvNKf8+QYis+DpAsggKz+sa2qljtMgvdAeNBh5s=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/eritikass/githubmarkdownconvertergo v0.1.10 h1:mL93ADvYMOeT15DcGtK9AaFFc+RcWcy6kQBC6yS/5f4=
github.com/eritikass/githubmarkdownconvertergo v0.1.10/go.mod h1:BdpHs6imOtzE5KorbUtKa6bZ0ZBh1yFcrTTAL8FwDKY=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.8.1 h1:4+fr/el88TOO3ewCmQr8cx/CtZ/umlIRIs5M4NTNjf8=
github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
github.com/go-logr/logr v1.3.0 h1:2y3SDp0ZXuc6/cjLSZ+Q3ir+QB9T/iG5yYRXqsagWSY=
github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY=
github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU=
github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs=
github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho=
github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA=
github.com/go-playground/validator/v10 v10.10.0 h1:I7mrTYv78z8k8VXa/qJlOlEXn/nBh+BF8dHX5nt/dr0=
github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-telegram/bot v1.2.1 h1:FkrixLCtMtPUQAN4plXdNElbhkdXkx2p68YPXKBruDg=
github.com/go-telegram/bot v1.2.1/go.mod h1:i2TRs7fXWIeaceF3z7KzsMt/he0TwkVC680mvdTFYeM=
github.com/go-test/deep v1.0.4 h1:u2CU3YKy9I2pmu9pX0eq50wCgjfGIt539SqR7FbHiho=
github.com/go-test/deep v1.0.4/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/goccy/go-json v0.9.7 h1:IcB+Aqpx/iMHu5Yooh7jEzJk1JZ7Pjtmys2ukPr7EeM=
github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/gocolly/colly v1.2.0 h1:qRz9YAn8FIH0qzgNUw+HT9UN7wm1oF9OBAilwEWpyrI=
github.com/gocolly/colly v1.2.0/go.mod h1:Hof5T3ZswNVsOHYmba1u03W65HDWgpV5HifSuueE0EA=
github.com/gofiber/fiber/v2 v2.52.4 h1:P+T+4iK7VaqUsq2PALYEfBBo6bJZ4q3FP8cZ84EggTM=
github.com/gofiber/fiber/v2 v2.52.4/go.mod h1:KEOE+cXMhXG0zHc9d8+E38hoX+ZN7bhOtgeF2oT6jrQ=
github.com/gofiber/template v1.8.3 h1:hzHdvMwMo/T2kouz2pPCA0zGiLCeMnoGsQZBTSYgZxc=
github.com/gofiber/template v1.8.3/go.mod h1:bs/2n0pSNPOkRa5VJ8zTIvedcI/lEYxzV3+YPXdBvq8=
github.com/gofiber/template/html/v2 v2.1.1 h1:QEy3O3EBkvwDthy5bXVGUseOyO6ldJoiDxlF4+MJiV8=
github.com/gofiber/template/html/v2 v2.1.1/go.mod h1:2G0GHHOUx70C1LDncoBpe4T6maQbNa4x1CVNFW0wju0=
github.com/gofiber/utils v1.1.0 h1:vdEBpn7AzIUJRhe+CiTOJdUcTg4Q9RK+pEa0KPbLdrM=
github.com/gofiber/utils v1.1.0/go.mod h1:poZpsnhBykfnY1Mc0KeEa6mSHrS3dV0+oBWyeQmb2e0=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE=
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github/v69 v69.2.0 h1:wR+Wi/fN2zdUx9YxSmYE0ktiX9IAR/BeePzeaUUbEHE=
github.com/google/go-github/v69 v69.2.0/go.mod h1:xne4jymxLR6Uj9b7J7PyTpkMYstEMMwGZa0Aehh1azM=
github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.5.3 h1:saDtZ6Pbx/0u+bgYQ3q96pZgCzfhKXGPqt7kZ72aNNg=
github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/invopop/jsonschema v0.12.0 h1:6ovsNSuvn9wEQVOyc72aycBMVQFKz7cPdMJn10CvzRI=
github.com/invopop/jsonschema v0.12.0/go.mod h1:ffZ5Km5SWWRAIN6wbDXItl95euhFz2uON45H2qjYt+0=
github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM=
github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo=
github.com/kennygrant/sanitize v1.2.4 h1:gN25/otpP5vAsO2djbMhF/LQX6R7+O1TB4yv8NzpJ3o=
github.com/kennygrant/sanitize v1.2.4/go.mod h1:LGsjYYtgxbetdg5owWB2mpgUL6e2nfw2eObZ0u0qvak=
github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg=
github.com/klauspost/compress v1.17.7/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/metoro-io/mcp-golang v0.8.0 h1:DkigHa3w7WwMFomcEz5wiMDX94DsvVm/3mCV3d1obnc=
github.com/metoro-io/mcp-golang v0.8.0/go.mod h1:ifLP9ZzKpN1UqFWNTpAHOqSvNkMK6b7d1FSZ5Lu0lN0=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo/v2 v2.15.0 h1:79HwNRBAZHOEwrczrgSOPy+eFTTlIGELKy5as+ClttY=
github.com/onsi/ginkgo/v2 v2.15.0/go.mod h1:HlxMHtYF57y6Dpf+mc5529KKmSq9h2FpCF+/ZkwUxKM=
github.com/onsi/gomega v1.31.1 h1:KYppCUK+bUgAZwHOu7EXVBKyQA6ILvOESHkn/tgoqvo=
github.com/onsi/gomega v1.31.1/go.mod h1:y40C95dwAD1Nz36SsEnxvfFe8FFfNxzI5eJ0EYGyAy0=
github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/philippgille/chromem-go v0.5.0 h1:bryX0F3N6jnN/21iBd8i2/k9EzPTZn3nyiqAti19si8=
github.com/philippgille/chromem-go v0.5.0/go.mod h1:hTd+wGEm/fFPQl7ilfCwQXkgEUxceYh86iIdoKMolPo=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkoukk/tiktoken-go v0.1.6 h1:JF0TlJzhTbrI30wCvFuiw6FzP2+/bR+FIxUdgEAcUsw=
github.com/pkoukk/tiktoken-go v0.1.6/go.mod h1:9NiV+i9mJKGj1rYOT+njbv+ZwA/zJxYdewGl6qVatpg=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a h1:w3tdWGKbLGBPtR/8/oO74W6hmz0qE5q0z9aqSAewaaM=
github.com/rogpeppe/go-internal v1.13.2-0.20241226121412-a5dc8ff20d0a/go.mod h1:S8kfXMp+yh77OxPD4fdM6YUknrZpQxLhvxzS4gDHENY=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d h1:hrujxIzL1woJ7AwssoOcM/tq5JjjG2yYOc8odClEiXA=
github.com/saintfish/chardet v0.0.0-20230101081208-5e3ef4b5456d/go.mod h1:uugorj2VCxiV1x+LzaIdVa9b4S4qGAcH6cbhh4qVxOU=
github.com/sashabaranov/go-openai v1.18.3 h1:dspFGkmZbhjg1059KhqLYSV2GaCiRIn+bOu50TlXUq8=
github.com/sashabaranov/go-openai v1.18.3/go.mod h1:lj5b/K+zjTSFxVLijLSTDZuP7adOgerWeFyZLUhAKRg=
github.com/slack-go/slack v0.16.0 h1:khp/WCFv+Hb/B/AJaAwvcxKun0hM6grN0bUZ8xG60P8=
github.com/slack-go/slack v0.16.0/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf h1:pvbZ0lM0XWPBqUKqFU8cmavspvIl9nulOYwdy6IFRRo=
github.com/ssor/bom v0.0.0-20170718123548-6386211fdfcf/go.mod h1:RJID2RhlZKId02nZ62WenDCkgHFerpIOmW0iT7GKmXM=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/temoto/robotstxt v1.1.2 h1:W2pOjSJ6SWvldyEuiFXNxz3xZ8aiWX5LbfDiOFd7Fxg=
github.com/temoto/robotstxt v1.1.2/go.mod h1:+1AmkuG3IYkh1kv0d2qEB9Le88ehNO0zwOr3ujewlOo=
github.com/thoj/go-ircevent v0.0.0-20210723090443-73e444401d64 h1:l/T7dYuJEQZOwVOpjIXr1180aM9PZL/d1MnMVIxefX4=
github.com/thoj/go-ircevent v0.0.0-20210723090443-73e444401d64/go.mod h1:Q1NAJOuRdQCqN/VIWdnaaEhV8LpeO2rtlBP7/iDJNII=
github.com/tidwall/gjson v1.14.2/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/gjson v1.18.0 h1:FIDeeyB800efLX89e5a8Y0BNH+LOngJyGrIWxG2FKQY=
github.com/tidwall/gjson v1.18.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk=
github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA=
github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM=
github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4=
github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU=
github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY=
github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28=
github.com/tmc/langchaingo v0.1.8 h1:nrImgh0aWdu3stJTHz80N60WGwPWY8HXCK10gQny7bA=
github.com/tmc/langchaingo v0.1.8/go.mod h1:iNBfS9e6jxBKsJSPWnlqNhoVWgdA3D1g5cdFJjbIZNQ=
github.com/traefik/yaegi v0.16.1 h1:f1De3DVJqIDKmnasUF6MwmWv1dSEEat0wcpXhD2On3E=
github.com/traefik/yaegi v0.16.1/go.mod h1:4eVhbPb3LnD2VigQjhYbEJ69vDRFdT2HQNrXx8eEwUY=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.2.7 h1:YPXUKf7fYbp/y8xloBqZOw2qaVggbfwMlI8WM3wZUJ0=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasthttp v1.52.0 h1:wqBQpxH71XW0e2g+Og4dzQM8pk34aFYlA1Ga8db7gU0=
github.com/valyala/fasthttp v1.52.0/go.mod h1:hf5C4QnVMkNXMspnsUlfM3WitlgYflyhHYoKol/szxQ=
github.com/valyala/tcplisten v1.0.0 h1:rBHj/Xf+E1tRGZyWIWwJDiRY0zc1Js+CV5DqwacVSA8=
github.com/valyala/tcplisten v1.0.0/go.mod h1:T0xQ8SeCZGxckz9qRXTfG43PvQ/mcWh7FwZEA7Ioqkc=
github.com/wk8/go-ordered-map/v2 v2.1.8 h1:5h/BUHu93oj4gIdvHHHGsScSTMijfx5PeYkE/fJgbpc=
github.com/wk8/go-ordered-map/v2 v2.1.8/go.mod h1:5nJHM5DyteebpVlHnWMV0rPz6Zp7+xBAnxjb1X5vnTw=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.starlark.net v0.0.0-20230302034142-4b1e35fe2254 h1:Ss6D3hLXTM0KobyBYEAygXzFfGcjnmfEJOBgSbemCtg=
go.starlark.net v0.0.0-20230302034142-4b1e35fe2254/go.mod h1:jxU+3+j+71eXOW14274+SmmuW82qJzl6iZSeqEtTGds=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.30.0 h1:RwoQn3GkWiMkzlX562cLB7OxWvjH1L8xutO2WoJcRoY=
golang.org/x/crypto v0.30.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210916014120-12bc252f5db8/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.5.0/go.mod h1:DivGGAXEgPSlEBzxGzZI+ZLohi+xUj054jfeKui00ws=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
golang.org/x/net v0.32.0 h1:ZqPmj8Kzc+Y6e0+skZsuACbx+wzMgo5MQsJh9Qd6aYI=
golang.org/x/net v0.32.0/go.mod h1:CwU0IoeOlnQQWJ6ioyFrfRuomB8GKF6KbYXZVyeXNfs=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.28.0 h1:Fksou7UEQUWlKvIdsqzJmUmCX3cZuD2+P3XyyzwMhlA=
golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.0.0-20220526004731-065cf7ba2467/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.4.0/go.mod h1:9P2UbLfCdcvo3p/nzKvsmas4TnlujnuoV9hGgYzW1lQ=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.7.0/go.mod h1:P32HKFT3hSsZrRxla30E9HqToFYAQPCMs/zFMBUFqPY=
golang.org/x/term v0.27.0 h1:WP60Sv1nlK1T6SupCHbXzSaN0b9wUmsPoRS9b61A23Q=
golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.6.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.28.0 h1:WuB6qZ4RPCQo5aP3WdKZS7i595EdWqWR8vqJTlwTVK8=
golang.org/x/tools v0.28.0/go.mod h1:dcIOrVd3mfQKTgrDVQHqCPMWy6lnhfhtX3hLXYVLfRw=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.32.0 h1:pPC6BG5ex8PDFnkbrGU3EixyhKcQ2aDuBS36lqK/C7I=
google.golang.org/protobuf v1.32.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056 h1:6YFJoB+0fUH6X3xU/G2tQqCYg+PkGtnZ5nMR5rpw72g=
jaytaylor.com/html2text v0.0.0-20230321000545-74c2419ad056/go.mod h1:OxvTsCwKosqQ1q7B+8FwXqg4rKZ/UG9dUW+g/VL2xH4=
mvdan.cc/xurls/v2 v2.6.0 h1:3NTZpeTxYVWNSokW3MKeyVkz/j7uYXYiMtXRUfmjbgI=
mvdan.cc/xurls/v2 v2.6.0/go.mod h1:bCvEZ1XvdA6wDnxY7jPPjEmigDtvtvPXAD/Exa9IMSk=
15
jsconfig.json
Normal file
15
jsconfig.json
Normal file
@@ -0,0 +1,15 @@
{
  "compilerOptions": {
    "module": "ESNext",
    "moduleResolution": "Bundler",
    "target": "ES2022",
    "jsx": "react",
    "allowImportingTsExtensions": true,
    "strictNullChecks": true,
    "strictFunctionTypes": true
  },
  "exclude": [
    "node_modules",
    "**/node_modules/*"
  ]
}
92
main.go
Normal file
92
main.go
Normal file
@@ -0,0 +1,92 @@
package main

import (
	"log"
	"os"
	"path/filepath"
	"strings"

	"github.com/mudler/LocalAGI/core/state"
	"github.com/mudler/LocalAGI/services"
	"github.com/mudler/LocalAGI/webui"
)

var baseModel = os.Getenv("LOCALAGI_MODEL")
var multimodalModel = os.Getenv("LOCALAGI_MULTIMODAL_MODEL")
var apiURL = os.Getenv("LOCALAGI_LLM_API_URL")
var apiKey = os.Getenv("LOCALAGI_LLM_API_KEY")
var timeout = os.Getenv("LOCALAGI_TIMEOUT")
var stateDir = os.Getenv("LOCALAGI_STATE_DIR")
var localRAG = os.Getenv("LOCALAGI_LOCALRAG_URL")
var withLogs = os.Getenv("LOCALAGI_ENABLE_CONVERSATIONS_LOGGING") == "true"
var apiKeysEnv = os.Getenv("LOCALAGI_API_KEYS")
var imageModel = os.Getenv("LOCALAGI_IMAGE_MODEL")
var conversationDuration = os.Getenv("LOCALAGI_CONVERSATION_DURATION")

func init() {
	if baseModel == "" {
		panic("LOCALAGI_MODEL not set")
	}
	if apiURL == "" {
		panic("LOCALAGI_API_URL not set")
	}
	if timeout == "" {
		timeout = "5m"
	}
	if stateDir == "" {
		cwd, err := os.Getwd()
		if err != nil {
			panic(err)
		}

		stateDir = filepath.Join(cwd, "pool")
	}
}

func main() {
	// make sure state dir exists
	os.MkdirAll(stateDir, 0755)

	apiKeys := []string{}
	if apiKeysEnv != "" {
		apiKeys = strings.Split(apiKeysEnv, ",")
	}

	// Create the agent pool
	pool, err := state.NewAgentPool(
		baseModel,
		multimodalModel,
		imageModel,
		apiURL,
		apiKey,
		stateDir,
		localRAG,
		services.Actions,
		services.Connectors,
		services.DynamicPrompts,
		timeout,
		withLogs,
	)
	if err != nil {
		panic(err)
	}

	// Create the application
	app := webui.NewApp(
		webui.WithPool(pool),
		webui.WithConversationStoreduration(conversationDuration),
		webui.WithApiKeys(apiKeys...),
		webui.WithLLMAPIUrl(apiURL),
		webui.WithLLMAPIKey(apiKey),
		webui.WithLLMModel(baseModel),
		webui.WithStateDir(stateDir),
	)

	// Start the agents
	if err := pool.StartAll(); err != nil {
		panic(err)
	}

	// Start the web server
	log.Fatal(app.Listen(":3000"))
}
434
main.py
434
main.py
@@ -1,434 +0,0 @@
import openai
#from langchain.embeddings import HuggingFaceEmbeddings
from langchain.embeddings import LocalAIEmbeddings
import uuid
import sys

from localagi import LocalAGI
from loguru import logger
from ascii_magic import AsciiArt
from duckduckgo_search import DDGS
from typing import Dict, List
import os

# these three lines swap the stdlib sqlite3 lib with the pysqlite3 package for chroma
__import__('pysqlite3')
import sys
sys.modules['sqlite3'] = sys.modules.pop('pysqlite3')

from langchain.vectorstores import Chroma
from chromadb.config import Settings
import json
import os
from io import StringIO

# Parse arguments such as system prompt and batch mode
import argparse
parser = argparse.ArgumentParser(description='LocalAGI')
# System prompt
parser.add_argument('--system-prompt', dest='system_prompt', action='store',
                    help='System prompt to use')
# Batch mode
parser.add_argument('--prompt', dest='prompt', action='store', default=False,
                    help='Prompt mode')
# Interactive mode
parser.add_argument('--interactive', dest='interactive', action='store_true', default=False,
                    help='Interactive mode. Can be used with --prompt to start an interactive session')
# skip avatar creation
parser.add_argument('--skip-avatar', dest='skip_avatar', action='store_true', default=False,
                    help='Skip avatar creation')
# Reevaluate
parser.add_argument('--re-evaluate', dest='re_evaluate', action='store_true', default=False,
                    help='Reevaluate if another action is needed or we have completed the user request')
# Postprocess
parser.add_argument('--postprocess', dest='postprocess', action='store_true', default=False,
                    help='Postprocess the reasoning')
# Subtask context
parser.add_argument('--subtask-context', dest='subtaskContext', action='store_true', default=False,
                    help='Include context in subtasks')

# Search results number
parser.add_argument('--search-results', dest='search_results', type=int, action='store', default=2,
                    help='Number of search results to return')
# Plan message
parser.add_argument('--plan-message', dest='plan_message', action='store',
                    help="What message to use during planning",
                    )

DEFAULT_PROMPT="floating hair, portrait, ((loli)), ((one girl)), cute face, hidden hands, asymmetrical bangs, beautiful detailed eyes, eye shadow, hair ornament, ribbons, bowties, buttons, pleated skirt, (((masterpiece))), ((best quality)), colorful|((part of the head)), ((((mutated hands and fingers)))), deformed, blurry, bad anatomy, disfigured, poorly drawn face, mutation, mutated, extra limb, ugly, poorly drawn hands, missing limb, blurry, floating limbs, disconnected limbs, malformed hands, blur, out of focus, long neck, long body, Octane renderer, lowres, bad anatomy, bad hands, text"
DEFAULT_API_BASE = os.environ.get("DEFAULT_API_BASE", "http://api:8080")
# TTS api base
parser.add_argument('--tts-api-base', dest='tts_api_base', action='store', default=DEFAULT_API_BASE,
                    help='TTS api base')
# LocalAI api base
parser.add_argument('--localai-api-base', dest='localai_api_base', action='store', default=DEFAULT_API_BASE,
                    help='LocalAI api base')
# Images api base
parser.add_argument('--images-api-base', dest='images_api_base', action='store', default=DEFAULT_API_BASE,
                    help='Images api base')
# Embeddings api base
parser.add_argument('--embeddings-api-base', dest='embeddings_api_base', action='store', default=DEFAULT_API_BASE,
                    help='Embeddings api base')
# Functions model
parser.add_argument('--functions-model', dest='functions_model', action='store', default="functions",
                    help='Functions model')
# Embeddings model
parser.add_argument('--embeddings-model', dest='embeddings_model', action='store', default="all-MiniLM-L6-v2",
                    help='Embeddings model')
# LLM model
parser.add_argument('--llm-model', dest='llm_model', action='store', default="gpt-4",
                    help='LLM model')
# Voice model
parser.add_argument('--tts-model', dest='tts_model', action='store', default="en-us-kathleen-low.onnx",
                    help='TTS model')
# Stable diffusion model
parser.add_argument('--stablediffusion-model', dest='stablediffusion_model', action='store', default="stablediffusion",
                    help='Stable diffusion model')
# Stable diffusion prompt
parser.add_argument('--stablediffusion-prompt', dest='stablediffusion_prompt', action='store', default=DEFAULT_PROMPT,
                    help='Stable diffusion prompt')
# Force action
parser.add_argument('--force-action', dest='force_action', action='store', default="",
                    help='Force an action')
# Debug mode
parser.add_argument('--debug', dest='debug', action='store_true', default=False,
                    help='Debug mode')
# Critic mode
parser.add_argument('--critic', dest='critic', action='store_true', default=False,
                    help='Enable critic')
# Parse arguments
args = parser.parse_args()

STABLEDIFFUSION_MODEL = os.environ.get("STABLEDIFFUSION_MODEL", args.stablediffusion_model)
STABLEDIFFUSION_PROMPT = os.environ.get("STABLEDIFFUSION_PROMPT", args.stablediffusion_prompt)
FUNCTIONS_MODEL = os.environ.get("FUNCTIONS_MODEL", args.functions_model)
EMBEDDINGS_MODEL = os.environ.get("EMBEDDINGS_MODEL", args.embeddings_model)
LLM_MODEL = os.environ.get("LLM_MODEL", args.llm_model)
VOICE_MODEL= os.environ.get("TTS_MODEL",args.tts_model)
STABLEDIFFUSION_MODEL = os.environ.get("STABLEDIFFUSION_MODEL",args.stablediffusion_model)
STABLEDIFFUSION_PROMPT = os.environ.get("STABLEDIFFUSION_PROMPT", args.stablediffusion_prompt)
PERSISTENT_DIR = os.environ.get("PERSISTENT_DIR", "/data")
SYSTEM_PROMPT = ""
if os.environ.get("SYSTEM_PROMPT") or args.system_prompt:
    SYSTEM_PROMPT = os.environ.get("SYSTEM_PROMPT", args.system_prompt)

LOCALAI_API_BASE = args.localai_api_base
TTS_API_BASE = args.tts_api_base
IMAGE_API_BASE = args.images_api_base
EMBEDDINGS_API_BASE = args.embeddings_api_base

# Set log level
LOG_LEVEL = "INFO"

def my_filter(record):
    return record["level"].no >= logger.level(LOG_LEVEL).no

logger.remove()
logger.add(sys.stderr, filter=my_filter)

if args.debug:
    LOG_LEVEL = "DEBUG"
    logger.debug("Debug mode on")

FUNCTIONS_MODEL = os.environ.get("FUNCTIONS_MODEL", args.functions_model)
EMBEDDINGS_MODEL = os.environ.get("EMBEDDINGS_MODEL", args.embeddings_model)
LLM_MODEL = os.environ.get("LLM_MODEL", args.llm_model)
VOICE_MODEL= os.environ.get("TTS_MODEL",args.tts_model)
STABLEDIFFUSION_MODEL = os.environ.get("STABLEDIFFUSION_MODEL",args.stablediffusion_model)
STABLEDIFFUSION_PROMPT = os.environ.get("STABLEDIFFUSION_PROMPT", args.stablediffusion_prompt)
PERSISTENT_DIR = os.environ.get("PERSISTENT_DIR", "/data")
SYSTEM_PROMPT = ""
if os.environ.get("SYSTEM_PROMPT") or args.system_prompt:
    SYSTEM_PROMPT = os.environ.get("SYSTEM_PROMPT", args.system_prompt)

LOCALAI_API_BASE = args.localai_api_base
TTS_API_BASE = args.tts_api_base
IMAGE_API_BASE = args.images_api_base
EMBEDDINGS_API_BASE = args.embeddings_api_base

## Constants
REPLY_ACTION = "reply"
PLAN_ACTION = "plan"

embeddings = LocalAIEmbeddings(model=EMBEDDINGS_MODEL,openai_api_base=EMBEDDINGS_API_BASE)
chroma_client = Chroma(collection_name="memories", persist_directory="db", embedding_function=embeddings)

# Function to create images with LocalAI
def display_avatar(agi, input_text=STABLEDIFFUSION_PROMPT, model=STABLEDIFFUSION_MODEL):
    image_url = agi.get_avatar(input_text, model)
    # convert the image to ascii art
    my_art = AsciiArt.from_url(image_url)
    my_art.to_terminal()

## This function is called to ask the user if does agree on the action to take and execute
def ask_user_confirmation(action_name, action_parameters):
    logger.info("==> Ask user confirmation")
    logger.info("==> action_name: {action_name}", action_name=action_name)
    logger.info("==> action_parameters: {action_parameters}", action_parameters=action_parameters)
    # Ask via stdin
    logger.info("==> Do you want to execute the action? (y/n)")
    user_input = input()
    if user_input == "y":
        logger.info("==> Executing action")
        return True
    else:
        logger.info("==> Skipping action")
        return False

### Agent capabilities
### These functions are called by the agent to perform actions
###
def save(memory, agent_actions={}, localagi=None):
    q = json.loads(memory)
    logger.info(">>> saving to memories: ")
    logger.info(q["content"])
    chroma_client.add_texts([q["content"]],[{"id": str(uuid.uuid4())}])
    chroma_client.persist()
    return f"The object was saved permanently to memory."

def search_memory(query, agent_actions={}, localagi=None):
    q = json.loads(query)
    docs = chroma_client.similarity_search(q["reasoning"])
    text_res="Memories found in the database:\n"
    for doc in docs:
        text_res+="- "+doc.page_content+"\n"

    #if args.postprocess:
    #    return post_process(text_res)
    #return text_res
    return localagi.post_process(text_res)


# write file to disk with content
def save_file(arg, agent_actions={}, localagi=None):
    arg = json.loads(arg)
    filename = arg["filename"]
    content = arg["content"]
    # create persistent dir if does not exist
    if not os.path.exists(PERSISTENT_DIR):
        os.makedirs(PERSISTENT_DIR)
    # write the file in the directory specified
    filename = os.path.join(PERSISTENT_DIR, filename)
    with open(filename, 'w') as f:
        f.write(content)
    return f"File {filename} saved successfully."


def ddg(query: str, num_results: int, backend: str = "api") -> List[Dict[str, str]]:
    """Run query through DuckDuckGo and return metadata.

    Args:
        query: The query to search for.
        num_results: The number of results to return.

    Returns:
        A list of dictionaries with the following keys:
            snippet - The description of the result.
            title - The title of the result.
            link - The link to the result.
    """

    with DDGS() as ddgs:
        results = ddgs.text(
            query,
            backend=backend,
        )
        if results is None:
            return [{"Result": "No good DuckDuckGo Search Result was found"}]

        def to_metadata(result: Dict) -> Dict[str, str]:
            if backend == "news":
                return {
                    "date": result["date"],
                    "title": result["title"],
                    "snippet": result["body"],
                    "source": result["source"],
                    "link": result["url"],
                }
            return {
                "snippet": result["body"],
                "title": result["title"],
                "link": result["href"],
            }

        formatted_results = []
        for i, res in enumerate(results, 1):
            if res is not None:
                formatted_results.append(to_metadata(res))
            if len(formatted_results) == num_results:
                break
    return formatted_results

## Search on duckduckgo
def search_duckduckgo(a, agent_actions={}, localagi=None):
    a = json.loads(a)
    list=ddg(a["query"], args.search_results)

    text_res=""
    for doc in list:
        text_res+=f"""{doc["link"]}: {doc["title"]} {doc["snippet"]}\n"""

    #if args.postprocess:
    #    return post_process(text_res)
    return text_res
    #l = json.dumps(list)
    #return l

### End Agent capabilities
###

### Agent action definitions
agent_actions = {
    "search_internet": {
        "function": search_duckduckgo,
        "plannable": True,
        "description": 'For searching the internet with a query, the assistant replies with the action "search_internet" and the query to search.',
        "signature": {
            "name": "search_internet",
            "description": """For searching internet.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
            }
        },
    },
    "save_file": {
        "function": save_file,
        "plannable": True,
        "description": 'The assistant replies with the action "save_file", the filename and content to save for writing a file to disk permanently. This can be used to store the result of complex actions locally.',
        "signature": {
            "name": "save_file",
            "description": """For saving a file to disk with content.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "filename": {
                        "type": "string",
                        "description": "information to save"
                    },
                    "content": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
            }
        },
    },
    "save_memory": {
        "function": save,
        "plannable": True,
        "description": 'The assistant replies with the action "save_memory" and the string to remember or store an information that thinks it is relevant permanently.',
        "signature": {
            "name": "save_memory",
            "description": """Save or store informations into memory.""",
            "parameters": {
                "type": "object",
                "properties": {
                    "content": {
                        "type": "string",
                        "description": "information to save"
                    },
                },
                "required": ["content"]
            }
        },
    },
    "search_memory": {
        "function": search_memory,
        "plannable": True,
        "description": 'The assistant replies with the action "search_memory" for searching between its memories with a query term.',
        "signature": {
            "name": "search_memory",
            "description": """Search in memory""",
            "parameters": {
                "type": "object",
                "properties": {
                    "reasoning": {
                        "type": "string",
                        "description": "reasoning behind the intent"
                    },
                },
                "required": ["reasoning"]
            }
        },
    },
}

if __name__ == "__main__":
    conversation_history = []

    # Create a LocalAGI instance
    logger.info("Creating LocalAGI instance")
    localagi = LocalAGI(
        agent_actions=agent_actions,
        llm_model=LLM_MODEL,
        tts_model=VOICE_MODEL,
        tts_api_base=TTS_API_BASE,
        functions_model=FUNCTIONS_MODEL,
        api_base=LOCALAI_API_BASE,
        stablediffusion_api_base=IMAGE_API_BASE,
        stablediffusion_model=STABLEDIFFUSION_MODEL,
        force_action=args.force_action,
        plan_message=args.plan_message,
    )

    # Set a system prompt if SYSTEM_PROMPT is set
    if SYSTEM_PROMPT != "":
        conversation_history.append({
            "role": "system",
            "content": SYSTEM_PROMPT
        })

    logger.info("Welcome to LocalAGI")

    # Skip avatar creation if --skip-avatar is set
    if not args.skip_avatar:
|
|
||||||
logger.info("Creating avatar, please wait...")
|
|
||||||
display_avatar(localagi)
|
|
||||||
|
|
||||||
actions = ""
|
|
||||||
for action in agent_actions:
|
|
||||||
actions+=" '"+action+"'"
|
|
||||||
logger.info("LocalAGI internally can do the following actions:{actions}", actions=actions)
|
|
||||||
|
|
||||||
if not args.prompt:
|
|
||||||
logger.info(">>> Interactive mode <<<")
|
|
||||||
else:
|
|
||||||
logger.info(">>> Prompt mode <<<")
|
|
||||||
logger.info(args.prompt)
|
|
||||||
|
|
||||||
# IF in prompt mode just evaluate, otherwise loop
|
|
||||||
if args.prompt:
|
|
||||||
conversation_history=localagi.evaluate(
|
|
||||||
args.prompt,
|
|
||||||
conversation_history,
|
|
||||||
critic=args.critic,
|
|
||||||
re_evaluate=args.re_evaluate,
|
|
||||||
# Enable to lower context usage but increases LLM calls
|
|
||||||
postprocess=args.postprocess,
|
|
||||||
subtaskContext=args.subtaskContext,
|
|
||||||
)
|
|
||||||
localagi.tts_play(conversation_history[-1]["content"])
|
|
||||||
|
|
||||||
if not args.prompt or args.interactive:
|
|
||||||
# TODO: process functions also considering the conversation history? conversation history + input
|
|
||||||
logger.info(">>> Ready! What can I do for you? ( try with: plan a roadtrip to San Francisco ) <<<")
|
|
||||||
|
|
||||||
while True:
|
|
||||||
user_input = input(">>> ")
|
|
||||||
# we are going to use the args to change the evaluation behavior
|
|
||||||
conversation_history=localagi.evaluate(
|
|
||||||
user_input,
|
|
||||||
conversation_history,
|
|
||||||
critic=args.critic,
|
|
||||||
re_evaluate=args.re_evaluate,
|
|
||||||
# Enable to lower context usage but increases LLM calls
|
|
||||||
postprocess=args.postprocess,
|
|
||||||
subtaskContext=args.subtaskContext,
|
|
||||||
)
|
|
||||||
localagi.tts_play(conversation_history[-1]["content"])
|
|
||||||
172 pkg/client/agents.go Normal file
@@ -0,0 +1,172 @@
package localagi

import (
    "encoding/json"
    "fmt"
    "net/http"
)

// AgentConfig represents the configuration for an agent
type AgentConfig struct {
    Name          string                 `json:"name"`
    Actions       []string               `json:"actions,omitempty"`
    Connectors    []string               `json:"connectors,omitempty"`
    PromptBlocks  []string               `json:"prompt_blocks,omitempty"`
    InitialPrompt string                 `json:"initial_prompt,omitempty"`
    Parallel      bool                   `json:"parallel,omitempty"`
    Config        map[string]interface{} `json:"config,omitempty"`
}

// AgentStatus represents the status of an agent
type AgentStatus struct {
    Status string `json:"status"`
}

// ListAgents returns a list of all agents
func (c *Client) ListAgents() ([]string, error) {
    resp, err := c.doRequest(http.MethodGet, "/agents", nil)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    // The response is HTML, so we'll need to parse it properly.
    // For now, we'll just return a placeholder implementation.
    return []string{}, fmt.Errorf("ListAgents not implemented")
}

// GetAgentConfig retrieves the configuration for a specific agent
func (c *Client) GetAgentConfig(name string) (*AgentConfig, error) {
    path := fmt.Sprintf("/api/agent/%s/config", name)
    resp, err := c.doRequest(http.MethodGet, path, nil)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    var config AgentConfig
    if err := json.NewDecoder(resp.Body).Decode(&config); err != nil {
        return nil, fmt.Errorf("error decoding response: %w", err)
    }

    return &config, nil
}

// CreateAgent creates a new agent with the given configuration
func (c *Client) CreateAgent(config *AgentConfig) error {
    resp, err := c.doRequest(http.MethodPost, "/api/agent/create", config)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    var response map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return fmt.Errorf("error decoding response: %w", err)
    }

    if status, ok := response["status"]; ok && status == "ok" {
        return nil
    }
    return fmt.Errorf("failed to create agent: %v", response)
}

// UpdateAgentConfig updates the configuration for an existing agent
func (c *Client) UpdateAgentConfig(name string, config *AgentConfig) error {
    // Ensure the name in the URL matches the name in the config
    config.Name = name
    path := fmt.Sprintf("/api/agent/%s/config", name)

    resp, err := c.doRequest(http.MethodPut, path, config)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    var response map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return fmt.Errorf("error decoding response: %w", err)
    }

    if status, ok := response["status"]; ok && status == "ok" {
        return nil
    }
    return fmt.Errorf("failed to update agent: %v", response)
}

// DeleteAgent removes an agent
func (c *Client) DeleteAgent(name string) error {
    path := fmt.Sprintf("/api/agent/%s", name)
    resp, err := c.doRequest(http.MethodDelete, path, nil)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    var response map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return fmt.Errorf("error decoding response: %w", err)
    }

    if status, ok := response["status"]; ok && status == "ok" {
        return nil
    }
    return fmt.Errorf("failed to delete agent: %v", response)
}

// PauseAgent pauses an agent
func (c *Client) PauseAgent(name string) error {
    path := fmt.Sprintf("/api/agent/pause/%s", name)
    resp, err := c.doRequest(http.MethodPut, path, nil)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    var response map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return fmt.Errorf("error decoding response: %w", err)
    }

    if status, ok := response["status"]; ok && status == "ok" {
        return nil
    }
    return fmt.Errorf("failed to pause agent: %v", response)
}

// StartAgent starts a paused agent
func (c *Client) StartAgent(name string) error {
    path := fmt.Sprintf("/api/agent/start/%s", name)
    resp, err := c.doRequest(http.MethodPut, path, nil)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    var response map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return fmt.Errorf("error decoding response: %w", err)
    }

    if status, ok := response["status"]; ok && status == "ok" {
        return nil
    }
    return fmt.Errorf("failed to start agent: %v", response)
}

// ExportAgent exports an agent configuration
func (c *Client) ExportAgent(name string) (*AgentConfig, error) {
    path := fmt.Sprintf("/settings/export/%s", name)
    resp, err := c.doRequest(http.MethodGet, path, nil)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    var config AgentConfig
    if err := json.NewDecoder(resp.Body).Decode(&config); err != nil {
        return nil, fmt.Errorf("error decoding response: %w", err)
    }

    return &config, nil
}

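A minimal usage sketch for the agent-management client above (an illustration, not part of the diff): it assumes a LocalAGI server reachable at http://localhost:3000 with no API key, and the agent name "researcher" and its action list are placeholders.

package main

import (
    "fmt"
    "time"

    localagi "github.com/mudler/LocalAGI/pkg/client"
)

func main() {
    // Base URL and API key are assumptions for illustration only.
    c := localagi.NewClient("http://localhost:3000", "", 30*time.Second)

    // Create a hypothetical agent that can search the internet.
    err := c.CreateAgent(&localagi.AgentConfig{
        Name:    "researcher",
        Actions: []string{"search_internet"},
    })
    if err != nil {
        fmt.Println("create failed:", err)
        return
    }

    // Pause and resume it through the management endpoints.
    _ = c.PauseAgent("researcher")
    _ = c.StartAgent("researcher")
}
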
65 pkg/client/chat.go Normal file
@@ -0,0 +1,65 @@
package localagi

import (
    "fmt"
    "net/http"
    "net/url"
    "strings"
)

// Message represents a chat message
type Message struct {
    Message string `json:"message"`
}

// ChatResponse represents a response from the agent
type ChatResponse struct {
    Response string `json:"response"`
}

// SendMessage sends a message to an agent
func (c *Client) SendMessage(agentName, message string) error {
    path := fmt.Sprintf("/chat/%s", agentName)

    msg := Message{
        Message: message,
    }

    resp, err := c.doRequest(http.MethodPost, path, msg)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    // The response is HTML, so it's not easily parseable in this context
    return nil
}

// Notify sends a notification to an agent
func (c *Client) Notify(agentName, message string) error {
    path := fmt.Sprintf("/notify/%s", agentName)

    // URL-encoded form data; the message needs proper escaping and a
    // form body must travel over POST
    form := strings.NewReader(url.Values{"message": {message}}.Encode())

    req, err := http.NewRequest(http.MethodPost, c.BaseURL+path, form)
    if err != nil {
        return fmt.Errorf("error creating request: %w", err)
    }

    if c.APIKey != "" {
        req.Header.Set("Authorization", "Bearer "+c.APIKey)
    }
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

    resp, err := c.HTTPClient.Do(req)
    if err != nil {
        return fmt.Errorf("error making request: %w", err)
    }
    defer resp.Body.Close()

    if resp.StatusCode >= 400 {
        return fmt.Errorf("api error (status %d)", resp.StatusCode)
    }

    return nil
}

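A companion sketch for the chat helpers (again an illustration, not part of the diff), reusing the assumed server address and the hypothetical "researcher" agent from the previous example:

package main

import (
    "fmt"
    "time"

    localagi "github.com/mudler/LocalAGI/pkg/client"
)

func main() {
    c := localagi.NewClient("http://localhost:3000", "", 30*time.Second)

    // Fire-and-forget: the chat endpoint answers with HTML, so only the
    // error result is meaningful here.
    if err := c.SendMessage("researcher", "summarize today's notes"); err != nil {
        fmt.Println("send failed:", err)
    }
    if err := c.Notify("researcher", "new task queued"); err != nil {
        fmt.Println("notify failed:", err)
    }
}
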
76 pkg/client/client.go Normal file
@@ -0,0 +1,76 @@
package localagi

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "time"
)

// Client represents a client for the LocalAGI API
type Client struct {
    BaseURL    string
    APIKey     string
    HTTPClient *http.Client
}

// NewClient creates a new LocalAGI client
func NewClient(baseURL string, apiKey string, timeout time.Duration) *Client {
    if timeout == 0 {
        timeout = time.Second * 30
    }
    return &Client{
        BaseURL: baseURL,
        APIKey:  apiKey,
        HTTPClient: &http.Client{
            Timeout: timeout,
        },
    }
}

// SetTimeout sets the HTTP client timeout
func (c *Client) SetTimeout(timeout time.Duration) {
    c.HTTPClient.Timeout = timeout
}

// doRequest performs an HTTP request and returns the response
func (c *Client) doRequest(method, path string, body interface{}) (*http.Response, error) {
    var reqBody io.Reader
    if body != nil {
        jsonData, err := json.Marshal(body)
        if err != nil {
            return nil, fmt.Errorf("error marshaling request body: %w", err)
        }
        reqBody = bytes.NewBuffer(jsonData)
    }

    url := fmt.Sprintf("%s%s", c.BaseURL, path)
    req, err := http.NewRequest(method, url, reqBody)
    if err != nil {
        return nil, fmt.Errorf("error creating request: %w", err)
    }

    if c.APIKey != "" {
        req.Header.Set("Authorization", "Bearer "+c.APIKey)
    }

    if body != nil {
        req.Header.Set("Content-Type", "application/json")
    }

    resp, err := c.HTTPClient.Do(req)
    if err != nil {
        return nil, fmt.Errorf("error making request: %w", err)
    }

    if resp.StatusCode >= 400 {
        // Read the error response
        defer resp.Body.Close()
        errorData, _ := io.ReadAll(resp.Body)
        return resp, fmt.Errorf("api error (status %d): %s", resp.StatusCode, string(errorData))
    }

    return resp, nil
}

127 pkg/client/responses.go Normal file
@@ -0,0 +1,127 @@
package localagi

import (
    "encoding/json"
    "fmt"
    "net/http"
)

// RequestBody represents the message request to the AI model
type RequestBody struct {
    Model       string   `json:"model"`
    Input       any      `json:"input"`
    Temperature *float64 `json:"temperature,omitempty"`
    MaxTokens   *int     `json:"max_output_tokens,omitempty"`
}

// InputMessage represents a user input message
type InputMessage struct {
    Role    string `json:"role"`
    Content any    `json:"content"`
}

// ContentItem represents an item in a content array
type ContentItem struct {
    Type     string `json:"type"`
    Text     string `json:"text,omitempty"`
    ImageURL string `json:"image_url,omitempty"`
}

// ResponseBody represents the response from the AI model
type ResponseBody struct {
    CreatedAt int64             `json:"created_at"`
    Status    string            `json:"status"`
    Error     any               `json:"error,omitempty"`
    Output    []ResponseMessage `json:"output"`
}

// ResponseMessage represents a message in the response
type ResponseMessage struct {
    Type    string               `json:"type"`
    Status  string               `json:"status"`
    Role    string               `json:"role"`
    Content []MessageContentItem `json:"content"`
}

// MessageContentItem represents a content item in a message
type MessageContentItem struct {
    Type string `json:"type"`
    Text string `json:"text"`
}

// GetAIResponse sends a request to the AI model and returns the response
func (c *Client) GetAIResponse(request *RequestBody) (*ResponseBody, error) {
    resp, err := c.doRequest(http.MethodPost, "/v1/responses", request)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    var response ResponseBody
    if err := json.NewDecoder(resp.Body).Decode(&response); err != nil {
        return nil, fmt.Errorf("error decoding response: %w", err)
    }

    // Check if there was an error in the response
    if response.Error != nil {
        return nil, fmt.Errorf("api error: %v", response.Error)
    }

    return &response, nil
}

// SimpleAIResponse is a helper function to get a simple text response from the AI
func (c *Client) SimpleAIResponse(agentName, input string) (string, error) {
    temperature := 0.7
    request := &RequestBody{
        Model:       agentName,
        Input:       input,
        Temperature: &temperature,
    }

    response, err := c.GetAIResponse(request)
    if err != nil {
        return "", err
    }

    // Extract the text response from the output
    for _, msg := range response.Output {
        if msg.Role == "assistant" {
            for _, content := range msg.Content {
                if content.Type == "output_text" {
                    return content.Text, nil
                }
            }
        }
    }

    return "", fmt.Errorf("no text response found")
}

// ChatAIResponse sends chat messages to the AI model
func (c *Client) ChatAIResponse(agentName string, messages []InputMessage) (string, error) {
    temperature := 0.7
    request := &RequestBody{
        Model:       agentName,
        Input:       messages,
        Temperature: &temperature,
    }

    response, err := c.GetAIResponse(request)
    if err != nil {
        return "", err
    }

    // Extract the text response from the output
    for _, msg := range response.Output {
        if msg.Role == "assistant" {
            for _, content := range msg.Content {
                if content.Type == "output_text" {
                    return content.Text, nil
                }
            }
        }
    }

    return "", fmt.Errorf("no text response found")
}

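A short sketch of the responses helpers above (not part of the diff): the server URL is an assumption, and "researcher" stands in for whatever agent name is routed as the model field.

package main

import (
    "fmt"
    "time"

    localagi "github.com/mudler/LocalAGI/pkg/client"
)

func main() {
    c := localagi.NewClient("http://localhost:3000", "", 2*time.Minute)

    // One-shot text completion routed to the agent named "researcher".
    answer, err := c.SimpleAIResponse("researcher", "What is LocalAGI?")
    if err != nil {
        fmt.Println("request failed:", err)
        return
    }
    fmt.Println(answer)

    // Multi-turn variant using the structured input format.
    reply, err := c.ChatAIResponse("researcher", []localagi.InputMessage{
        {Role: "system", Content: "You answer in one sentence."},
        {Role: "user", Content: "And what can it do?"},
    })
    if err == nil {
        fmt.Println(reply)
    }
}
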
42 pkg/config/meta.go Normal file
@@ -0,0 +1,42 @@
package config

type FieldType string

const (
    FieldTypeNumber   FieldType = "number"
    FieldTypeText     FieldType = "text"
    FieldTypeTextarea FieldType = "textarea"
    FieldTypeCheckbox FieldType = "checkbox"
    FieldTypeSelect   FieldType = "select"
)

type Tags struct {
    Section string `json:"section,omitempty"`
}

type FieldOption struct {
    Value string `json:"value"`
    Label string `json:"label"`
}

type Field struct {
    Name         string        `json:"name"`
    Type         FieldType     `json:"type"`
    Label        string        `json:"label"`
    DefaultValue any           `json:"defaultValue"`
    Placeholder  string        `json:"placeholder,omitempty"`
    HelpText     string        `json:"helpText,omitempty"`
    Required     bool          `json:"required,omitempty"`
    Disabled     bool          `json:"disabled,omitempty"`
    Options      []FieldOption `json:"options,omitempty"`
    Min          float32       `json:"min,omitempty"`
    Max          float32       `json:"max,omitempty"`
    Step         float32       `json:"step,omitempty"`
    Tags         Tags          `json:"tags,omitempty"`
}

type FieldGroup struct {
    Name   string  `json:"name"`
    Label  string  `json:"label"`
    Fields []Field `json:"fields"`
}

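A sketch of how this form metadata might be assembled and serialized (an assumption about its intended use; the section name, labels and option values are all illustrative):

package main

import (
    "encoding/json"
    "fmt"

    "github.com/mudler/LocalAGI/pkg/config"
)

func main() {
    // A hypothetical "Model settings" group rendered from this metadata.
    group := config.FieldGroup{
        Name:  "model",
        Label: "Model settings",
        Fields: []config.Field{
            {Name: "temperature", Type: config.FieldTypeNumber, Label: "Temperature",
                DefaultValue: 0.7, Min: 0, Max: 2, Step: 0.1},
            {Name: "backend", Type: config.FieldTypeSelect, Label: "Backend", Required: true,
                Options: []config.FieldOption{{Value: "llama", Label: "llama.cpp"}}},
        },
    }
    out, _ := json.MarshalIndent(group, "", "  ")
    fmt.Println(string(out))
}
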
112 pkg/deepface/client.go Normal file
@@ -0,0 +1,112 @@
package deepface

// A simple Golang client for the repository: https://github.com/serengil/deepface

import (
    "bytes"
    "encoding/base64"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
    "os"
)

type DeepFaceClient struct {
    BaseURL string
}

func NewClient(baseURL string) *DeepFaceClient {
    return &DeepFaceClient{BaseURL: baseURL}
}

func encodeImageToBase64(imgPath string) (string, error) {
    file, err := os.Open(imgPath)
    if err != nil {
        return "", err
    }
    defer file.Close()

    buf := new(bytes.Buffer)
    if _, err := io.Copy(buf, file); err != nil {
        return "", err
    }

    return base64.StdEncoding.EncodeToString(buf.Bytes()), nil
}

func (c *DeepFaceClient) Represent(modelName, imgPath string) error {
    imgBase64, err := encodeImageToBase64(imgPath)
    if err != nil {
        return err
    }

    data := map[string]string{
        "model_name": modelName,
        "img":        imgBase64,
    }
    jsonData, _ := json.Marshal(data)

    resp, err := http.Post(c.BaseURL+"/represent", "application/json", bytes.NewBuffer(jsonData))
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println("Response:", string(body))
    return nil
}

func (c *DeepFaceClient) Verify(img1Path, img2Path, modelName, detector, metric string) error {
    img1Base64, err := encodeImageToBase64(img1Path)
    if err != nil {
        return err
    }
    img2Base64, err := encodeImageToBase64(img2Path)
    if err != nil {
        return err
    }

    data := map[string]string{
        "img1":             img1Base64,
        "img2":             img2Base64,
        "model_name":       modelName,
        "detector_backend": detector,
        "distance_metric":  metric,
    }
    jsonData, _ := json.Marshal(data)

    resp, err := http.Post(c.BaseURL+"/verify", "application/json", bytes.NewBuffer(jsonData))
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println("Response:", string(body))
    return nil
}

func (c *DeepFaceClient) Analyze(imgPath string, actions []string) error {
    imgBase64, err := encodeImageToBase64(imgPath)
    if err != nil {
        return err
    }

    data := map[string]interface{}{
        "img":     imgBase64,
        "actions": actions,
    }
    jsonData, _ := json.Marshal(data)

    resp, err := http.Post(c.BaseURL+"/analyze", "application/json", bytes.NewBuffer(jsonData))
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    body, _ := io.ReadAll(resp.Body)
    fmt.Println("Response:", string(body))
    return nil
}

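A minimal driver for the deepface client above (not part of the diff): the server address and image paths are placeholders, while the model, detector and metric names follow the serengil/deepface defaults.

package main

import "github.com/mudler/LocalAGI/pkg/deepface"

func main() {
    // The deepface API server address and image paths are placeholders.
    c := deepface.NewClient("http://localhost:5000")

    // Compare two faces; each method prints the raw JSON response.
    _ = c.Verify("img1.jpg", "img2.jpg", "VGG-Face", "opencv", "cosine")

    // Run attribute analysis on a single image.
    _ = c.Analyze("img1.jpg", []string{"age", "gender", "emotion"})
}
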
28 pkg/llm/client.go Normal file
@@ -0,0 +1,28 @@
package llm

import (
    "net/http"
    "time"

    "github.com/sashabaranov/go-openai"
)

func NewClient(APIKey, URL, timeout string) *openai.Client {
    // Set up the OpenAI client
    if APIKey == "" {
        //log.Fatal("OPENAI_API_KEY environment variable not set")
        APIKey = "sk-xxx"
    }
    config := openai.DefaultConfig(APIKey)
    config.BaseURL = URL

    dur, err := time.ParseDuration(timeout)
    if err != nil {
        dur = 150 * time.Second
    }

    config.HTTPClient = &http.Client{
        Timeout: dur,
    }
    return openai.NewClientWithConfig(config)
}

57 pkg/llm/json.go Normal file
@@ -0,0 +1,57 @@
package llm

import (
    "context"
    "encoding/json"
    "fmt"

    "github.com/mudler/LocalAGI/pkg/xlog"
    "github.com/sashabaranov/go-openai"
    "github.com/sashabaranov/go-openai/jsonschema"
)

func GenerateTypedJSON(ctx context.Context, client *openai.Client, guidance, model string, i jsonschema.Definition, dst any) error {
    toolName := "json"
    decision := openai.ChatCompletionRequest{
        Model: model,
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    "user",
                Content: guidance,
            },
        },
        Tools: []openai.Tool{
            {
                Type: openai.ToolTypeFunction,
                Function: openai.FunctionDefinition{
                    Name:       toolName,
                    Parameters: i,
                },
            },
        },
        ToolChoice: openai.ToolChoice{
            Type:     openai.ToolTypeFunction,
            Function: openai.ToolFunction{Name: toolName},
        },
    }

    resp, err := client.CreateChatCompletion(ctx, decision)
    if err != nil {
        return err
    }

    if len(resp.Choices) != 1 {
        return fmt.Errorf("no choices: %d", len(resp.Choices))
    }

    msg := resp.Choices[0].Message

    if len(msg.ToolCalls) == 0 {
        return fmt.Errorf("no tool calls: %d", len(msg.ToolCalls))
    }

    xlog.Debug("JSON generated", "Arguments", msg.ToolCalls[0].Function.Arguments)

    return json.Unmarshal([]byte(msg.ToolCalls[0].Function.Arguments), dst)
}

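A sketch of forcing typed output with GenerateTypedJSON (not part of the diff): the endpoint, model name and timeout are placeholders for a LocalAI setup, and the Intent struct is a hypothetical target type matching the schema.

package main

import (
    "context"
    "fmt"

    "github.com/mudler/LocalAGI/pkg/llm"
    "github.com/sashabaranov/go-openai/jsonschema"
)

// Intent is a hypothetical target type; its fields mirror the schema below.
type Intent struct {
    Action    string `json:"action"`
    Reasoning string `json:"reasoning"`
}

func main() {
    // Endpoint, model name and timeout are placeholder assumptions.
    client := llm.NewClient("", "http://localhost:8080/v1", "2m")

    schema := jsonschema.Definition{
        Type: jsonschema.Object,
        Properties: map[string]jsonschema.Definition{
            "action":    {Type: jsonschema.String},
            "reasoning": {Type: jsonschema.String},
        },
        Required: []string{"action", "reasoning"},
    }

    var intent Intent
    err := llm.GenerateTypedJSON(context.Background(),
        client, "Decide the next action for: plan a trip", "gpt-4", schema, &intent)
    fmt.Println(intent, err)
}
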
389 pkg/localrag/client.go Normal file
@@ -0,0 +1,389 @@
// TODO: this is a duplicate of LocalRAG/pkg/client
package localrag

import (
    "bytes"
    "crypto/md5"
    "encoding/hex"
    "encoding/json"
    "errors"
    "fmt"
    "io"
    "mime/multipart"
    "net/http"
    "os"
    "path/filepath"
    "time"

    "github.com/mudler/LocalAGI/core/agent"
    "github.com/mudler/LocalAGI/pkg/xlog"
)

var _ agent.RAGDB = &WrappedClient{}

type WrappedClient struct {
    *Client
    collection string
}

func NewWrappedClient(baseURL, apiKey, collection string) *WrappedClient {
    wc := &WrappedClient{
        Client:     NewClient(baseURL, apiKey),
        collection: collection,
    }

    wc.CreateCollection(collection)

    return wc
}

func (c *WrappedClient) Count() int {
    entries, err := c.ListEntries(c.collection)
    if err != nil {
        return 0
    }
    return len(entries)
}

func (c *WrappedClient) Reset() error {
    return c.Client.Reset(c.collection)
}

func (c *WrappedClient) Search(s string, similarity int) ([]string, error) {
    results, err := c.Client.Search(c.collection, s, similarity)
    if err != nil {
        return nil, err
    }
    var res []string
    for _, r := range results {
        res = append(res, fmt.Sprintf("%s (%+v)", r.Content, r.Metadata))
    }
    return res, nil
}

func (c *WrappedClient) Store(s string) error {
    // The LocalRAG client API takes only files at the moment, so we write
    // the string that we want to store to a file, then store the file.
    t := time.Now()
    dateTime := t.Format("2006-01-02-15-04-05")
    hash := md5.Sum([]byte(s))
    fileName := fmt.Sprintf("%s-%s.%s", dateTime, hex.EncodeToString(hash[:]), "txt")

    xlog.Debug("Storing string in LocalRAG", "collection", c.collection, "fileName", fileName)

    tempdir, err := os.MkdirTemp("", "localrag")
    if err != nil {
        return err
    }

    defer os.RemoveAll(tempdir)

    f := filepath.Join(tempdir, fileName)
    err = os.WriteFile(f, []byte(s), 0644)
    if err != nil {
        return err
    }

    defer os.Remove(f)
    return c.Client.Store(c.collection, f)
}

// Result represents a single result from a query.
type Result struct {
    ID        string
    Metadata  map[string]string
    Embedding []float32
    Content   string

    // The cosine similarity between the query and the document.
    // The higher the value, the more similar the document is to the query.
    // The value is in the range [-1, 1].
    Similarity float32
}

// Client is a client for the RAG API
type Client struct {
    BaseURL string
    APIKey  string
}

// NewClient creates a new RAG API client
func NewClient(baseURL, apiKey string) *Client {
    return &Client{
        BaseURL: baseURL,
        APIKey:  apiKey,
    }
}

// addAuthHeader sets the Authorization header when an API key is configured
func (c *Client) addAuthHeader(req *http.Request) {
    if c.APIKey == "" {
        return
    }
    req.Header.Set("Authorization", "Bearer "+c.APIKey)
}

// CreateCollection creates a new collection
func (c *Client) CreateCollection(name string) error {
    url := fmt.Sprintf("%s/api/collections", c.BaseURL)

    type request struct {
        Name string `json:"name"`
    }

    payload, err := json.Marshal(request{Name: name})
    if err != nil {
        return err
    }

    req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(payload))
    if err != nil {
        return err
    }
    req.Header.Set("Content-Type", "application/json")
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusCreated {
        return errors.New("failed to create collection")
    }

    return nil
}

// ListCollections lists all collections
func (c *Client) ListCollections() ([]string, error) {
    url := fmt.Sprintf("%s/api/collections", c.BaseURL)

    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        return nil, err
    }
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, errors.New("failed to list collections")
    }

    var collections []string
    err = json.NewDecoder(resp.Body).Decode(&collections)
    if err != nil {
        return nil, err
    }

    return collections, nil
}

// ListEntries lists all entries in a collection
func (c *Client) ListEntries(collection string) ([]string, error) {
    url := fmt.Sprintf("%s/api/collections/%s/entries", c.BaseURL, collection)

    req, err := http.NewRequest(http.MethodGet, url, nil)
    if err != nil {
        return nil, err
    }
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, errors.New("failed to list entries")
    }

    var entries []string
    err = json.NewDecoder(resp.Body).Decode(&entries)
    if err != nil {
        return nil, err
    }

    return entries, nil
}

// DeleteEntry deletes an entry in a collection
func (c *Client) DeleteEntry(collection, entry string) ([]string, error) {
    url := fmt.Sprintf("%s/api/collections/%s/entry/delete", c.BaseURL, collection)

    type request struct {
        Entry string `json:"entry"`
    }

    payload, err := json.Marshal(request{Entry: entry})
    if err != nil {
        return nil, err
    }

    req, err := http.NewRequest(http.MethodDelete, url, bytes.NewBuffer(payload))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", "application/json")
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        bodyResult := new(bytes.Buffer)
        bodyResult.ReadFrom(resp.Body)
        return nil, errors.New("failed to delete entry: " + bodyResult.String())
    }

    var results []string
    err = json.NewDecoder(resp.Body).Decode(&results)
    if err != nil {
        return nil, err
    }

    return results, nil
}

// Search searches a collection
func (c *Client) Search(collection, query string, maxResults int) ([]Result, error) {
    url := fmt.Sprintf("%s/api/collections/%s/search", c.BaseURL, collection)

    type request struct {
        Query      string `json:"query"`
        MaxResults int    `json:"max_results"`
    }

    payload, err := json.Marshal(request{Query: query, MaxResults: maxResults})
    if err != nil {
        return nil, err
    }

    req, err := http.NewRequest(http.MethodPost, url, bytes.NewBuffer(payload))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", "application/json")
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, errors.New("failed to search collection")
    }

    var results []Result
    err = json.NewDecoder(resp.Body).Decode(&results)
    if err != nil {
        return nil, err
    }

    return results, nil
}

// Reset resets a collection
func (c *Client) Reset(collection string) error {
    url := fmt.Sprintf("%s/api/collections/%s/reset", c.BaseURL, collection)

    req, err := http.NewRequest(http.MethodPost, url, nil)
    if err != nil {
        return err
    }
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        b := new(bytes.Buffer)
        b.ReadFrom(resp.Body)
        return errors.New("failed to reset collection: " + b.String())
    }

    return nil
}

// Store uploads a file to a collection
func (c *Client) Store(collection, filePath string) error {
    url := fmt.Sprintf("%s/api/collections/%s/upload", c.BaseURL, collection)

    file, err := os.Open(filePath)
    if err != nil {
        return err
    }
    defer file.Close()

    body := &bytes.Buffer{}
    writer := multipart.NewWriter(body)

    part, err := writer.CreateFormFile("file", file.Name())
    if err != nil {
        return err
    }

    _, err = io.Copy(part, file)
    if err != nil {
        return err
    }

    err = writer.Close()
    if err != nil {
        return err
    }

    req, err := http.NewRequest(http.MethodPost, url, body)
    if err != nil {
        return err
    }
    req.Header.Set("Content-Type", writer.FormDataContentType())
    c.addAuthHeader(req)

    client := &http.Client{}
    resp, err := client.Do(req)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        b := new(bytes.Buffer)
        b.ReadFrom(resp.Body)

        type response struct {
            Error string `json:"error"`
        }

        var r response
        err = json.Unmarshal(b.Bytes(), &r)
        if err == nil {
            return errors.New("failed to upload file: " + r.Error)
        }

        return errors.New("failed to upload file")
    }

    return nil
}

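A minimal round trip through the wrapped LocalRAG client above (an illustration, not part of the diff): the server address and the collection name "notes" are assumptions.

package main

import (
    "fmt"

    "github.com/mudler/LocalAGI/pkg/localrag"
)

func main() {
    // Assumes a LocalRAG server at this address; the collection name is arbitrary.
    rag := localrag.NewWrappedClient("http://localhost:8080", "", "notes")

    // Store turns the string into a temp file and uploads it.
    if err := rag.Store("LocalAGI agents can persist memories"); err != nil {
        fmt.Println("store failed:", err)
        return
    }

    // Search returns the matching chunks with their metadata appended.
    results, err := rag.Search("what can agents persist?", 3)
    fmt.Println(results, err)
}
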
9 pkg/utils/html.go Normal file
@@ -0,0 +1,9 @@
package utils

import "strings"

func HTMLify(s string) string {
    s = strings.TrimSpace(s)
    s = strings.ReplaceAll(s, "\n", "<br>")
    return s
}

113 pkg/vectorstore/chromem.go Normal file
@@ -0,0 +1,113 @@
package vectorstore

import (
    "context"
    "fmt"
    "runtime"

    "github.com/philippgille/chromem-go"
    "github.com/sashabaranov/go-openai"
)

type ChromemDB struct {
    collectionName  string
    collection      *chromem.Collection
    index           int
    client          *openai.Client
    db              *chromem.DB
    embeddingsModel string
}

func NewChromemDB(collection, path string, openaiClient *openai.Client, embeddingsModel string) (*ChromemDB, error) {
    // db, err := chromem.NewPersistentDB(path, true)
    // if err != nil {
    // 	return nil, err
    // }
    db := chromem.NewDB()

    cdb := &ChromemDB{
        collectionName:  collection,
        index:           1,
        db:              db,
        client:          openaiClient,
        embeddingsModel: embeddingsModel,
    }

    c, err := db.GetOrCreateCollection(collection, nil, cdb.embedding())
    if err != nil {
        return nil, err
    }
    cdb.collection = c

    return cdb, nil
}

func (c *ChromemDB) Count() int {
    return c.collection.Count()
}

func (c *ChromemDB) Reset() error {
    if err := c.db.DeleteCollection(c.collectionName); err != nil {
        return err
    }
    collection, err := c.db.GetOrCreateCollection(c.collectionName, nil, c.embedding())
    if err != nil {
        return err
    }
    c.collection = collection

    return nil
}

func (c *ChromemDB) embedding() chromem.EmbeddingFunc {
    return chromem.EmbeddingFunc(
        func(ctx context.Context, text string) ([]float32, error) {
            resp, err := c.client.CreateEmbeddings(ctx,
                openai.EmbeddingRequestStrings{
                    Input: []string{text},
                    Model: openai.EmbeddingModel(c.embeddingsModel),
                },
            )
            if err != nil {
                return []float32{}, fmt.Errorf("error getting keys: %v", err)
            }

            if len(resp.Data) == 0 {
                return []float32{}, fmt.Errorf("no response from OpenAI API")
            }

            embedding := resp.Data[0].Embedding

            return embedding, nil
        },
    )
}

func (c *ChromemDB) Store(s string) error {
    defer func() {
        c.index++
    }()
    if s == "" {
        return fmt.Errorf("empty string")
    }
    return c.collection.AddDocuments(context.Background(), []chromem.Document{
        {
            Content: s,
            ID:      fmt.Sprint(c.index),
        },
    }, runtime.NumCPU())
}

func (c *ChromemDB) Search(s string, similarEntries int) ([]string, error) {
    res, err := c.collection.Query(context.Background(), s, similarEntries, nil, nil)
    if err != nil {
        return nil, err
    }

    var results []string
    for _, r := range res {
        results = append(results, r.Content)
    }

    return results, nil
}

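A sketch of wiring the in-memory ChromemDB above to an OpenAI-compatible embeddings endpoint (not part of the diff): the URL and embeddings model name are placeholders for a LocalAI setup, and the path argument is currently unused by the in-memory build.

package main

import (
    "fmt"

    "github.com/mudler/LocalAGI/pkg/llm"
    "github.com/mudler/LocalAGI/pkg/vectorstore"
)

func main() {
    // Embeddings are computed through an OpenAI-compatible endpoint;
    // the URL and model name below are placeholder assumptions.
    oai := llm.NewClient("", "http://localhost:8080/v1", "1m")

    db, err := vectorstore.NewChromemDB("memories", "", oai, "text-embedding-ada-002")
    if err != nil {
        panic(err)
    }

    _ = db.Store("the roadtrip starts in San Francisco")
    hits, _ := db.Search("where does the trip start?", 1)
    fmt.Println(hits, db.Count())
}
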
86 pkg/vectorstore/localai.go Normal file
@@ -0,0 +1,86 @@
package vectorstore

import (
    "context"
    "fmt"

    "github.com/sashabaranov/go-openai"
)

type LocalAIRAGDB struct {
    client       *StoreClient
    openaiClient *openai.Client
}

func NewLocalAIRAGDB(storeClient *StoreClient, openaiClient *openai.Client) *LocalAIRAGDB {
    return &LocalAIRAGDB{
        client:       storeClient,
        openaiClient: openaiClient,
    }
}

func (db *LocalAIRAGDB) Reset() error {
    return fmt.Errorf("not implemented")
}

func (db *LocalAIRAGDB) Count() int {
    return 0
}

func (db *LocalAIRAGDB) Store(s string) error {
    resp, err := db.openaiClient.CreateEmbeddings(context.TODO(),
        openai.EmbeddingRequestStrings{
            Input: []string{s},
            Model: openai.AdaEmbeddingV2,
        },
    )
    if err != nil {
        return fmt.Errorf("error getting keys: %v", err)
    }

    if len(resp.Data) == 0 {
        return fmt.Errorf("no response from OpenAI API")
    }

    embedding := resp.Data[0].Embedding

    setReq := SetRequest{
        Keys:   [][]float32{embedding},
        Values: []string{s},
    }
    err = db.client.Set(setReq)
    if err != nil {
        return fmt.Errorf("error setting keys: %v", err)
    }

    return nil
}

func (db *LocalAIRAGDB) Search(s string, similarEntries int) ([]string, error) {
    resp, err := db.openaiClient.CreateEmbeddings(context.TODO(),
        openai.EmbeddingRequestStrings{
            Input: []string{s},
            Model: openai.AdaEmbeddingV2,
        },
    )
    if err != nil {
        return []string{}, fmt.Errorf("error getting keys: %v", err)
    }

    if len(resp.Data) == 0 {
        return []string{}, fmt.Errorf("no response from OpenAI API")
    }
    embedding := resp.Data[0].Embedding

    // Find example
    findReq := FindRequest{
        TopK: similarEntries, // Number of similar entries you want to find
        Key:  embedding,      // The key you're looking for similarities to
    }
    findResp, err := db.client.Find(findReq)
    if err != nil {
        return []string{}, fmt.Errorf("error finding keys: %v", err)
    }

    return findResp.Values, nil
}

161 pkg/vectorstore/store.go Normal file
@@ -0,0 +1,161 @@
package vectorstore

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "net/http"
)

// StoreClient holds the configuration for the store API client
type StoreClient struct {
    BaseURL  string
    APIToken string
    Client   *http.Client
}

// Request and response formats, based on the API documentation
type SetRequest struct {
    Keys   [][]float32 `json:"keys"`
    Values []string    `json:"values"`
}

type GetRequest struct {
    Keys [][]float32 `json:"keys"`
}

type GetResponse struct {
    Keys   [][]float32 `json:"keys"`
    Values []string    `json:"values"`
}

type DeleteRequest struct {
    Keys [][]float32 `json:"keys"`
}

type FindRequest struct {
    TopK int       `json:"topk"`
    Key  []float32 `json:"key"`
}

type FindResponse struct {
    Keys         [][]float32 `json:"keys"`
    Values       []string    `json:"values"`
    Similarities []float32   `json:"similarities"`
}

// NewStoreClient is the constructor for StoreClient
func NewStoreClient(baseUrl, apiToken string) *StoreClient {
    return &StoreClient{
        BaseURL:  baseUrl,
        APIToken: apiToken,
        Client:   &http.Client{},
    }
}

// Set stores the given keys and values
func (c *StoreClient) Set(req SetRequest) error {
    return c.doRequest("stores/set", req)
}

// Get retrieves the values for the given keys
func (c *StoreClient) Get(req GetRequest) (*GetResponse, error) {
    body, err := c.doRequestWithResponse("stores/get", req)
    if err != nil {
        return nil, err
    }

    var resp GetResponse
    err = json.Unmarshal(body, &resp)
    if err != nil {
        return nil, err
    }

    return &resp, nil
}

// Delete removes the given keys
func (c *StoreClient) Delete(req DeleteRequest) error {
    return c.doRequest("stores/delete", req)
}

// Find returns the entries most similar to the given key
func (c *StoreClient) Find(req FindRequest) (*FindResponse, error) {
    body, err := c.doRequestWithResponse("stores/find", req)
    if err != nil {
        return nil, err
    }

    var resp FindResponse
    err = json.Unmarshal(body, &resp)
    if err != nil {
        return nil, err
    }

    return &resp, nil
}

// Helper function to perform a request without expecting a response body
func (c *StoreClient) doRequest(path string, data interface{}) error {
    jsonData, err := json.Marshal(data)
    if err != nil {
        return err
    }

    req, err := http.NewRequest("POST", c.BaseURL+"/"+path, bytes.NewBuffer(jsonData))
    if err != nil {
        return err
    }
    // Set Bearer token
    if c.APIToken != "" {
        req.Header.Set("Authorization", "Bearer "+c.APIToken)
    }
    req.Header.Set("Content-Type", "application/json")

    resp, err := c.Client.Do(req)
    if err != nil {
        return err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return fmt.Errorf("API request to %s failed with status code %d", path, resp.StatusCode)
    }

    return nil
}

// Helper function to perform a request and parse the response body
func (c *StoreClient) doRequestWithResponse(path string, data interface{}) ([]byte, error) {
    jsonData, err := json.Marshal(data)
    if err != nil {
        return nil, err
    }

    req, err := http.NewRequest("POST", c.BaseURL+"/"+path, bytes.NewBuffer(jsonData))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", "application/json")
    // Set Bearer token
    if c.APIToken != "" {
        req.Header.Set("Authorization", "Bearer "+c.APIToken)
    }
    resp, err := c.Client.Do(req)
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, fmt.Errorf("API request to %s failed with status code %d", path, resp.StatusCode)
    }

    body, err := io.ReadAll(resp.Body)
    if err != nil {
        return nil, err
    }

    return body, nil
}

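A sketch of driving the raw stores API above (not part of the diff): it assumes a LocalAI stores endpoint at this address, and the two-dimensional keys stand in for real embedding vectors.

package main

import (
    "fmt"

    "github.com/mudler/LocalAGI/pkg/vectorstore"
)

func main() {
    // URL is a placeholder; the short vectors below stand in for embeddings.
    sc := vectorstore.NewStoreClient("http://localhost:8080", "")

    err := sc.Set(vectorstore.SetRequest{
        Keys:   [][]float32{{0.1, 0.9}, {0.8, 0.2}},
        Values: []string{"first note", "second note"},
    })
    if err != nil {
        panic(err)
    }

    // Query with a nearby vector and keep the single closest entry.
    found, err := sc.Find(vectorstore.FindRequest{TopK: 1, Key: []float32{0.15, 0.85}})
    fmt.Println(found, err)
}
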
71 pkg/xlog/xlog.go Normal file
@@ -0,0 +1,71 @@
package xlog

import (
    "context"
    "log/slog"
    "os"
    "runtime"
)

var logger *slog.Logger

func init() {
    var level = slog.LevelDebug

    switch os.Getenv("LOG_LEVEL") {
    case "info":
        level = slog.LevelInfo
    case "warn":
        level = slog.LevelWarn
    case "error":
        level = slog.LevelError
    case "debug":
        level = slog.LevelDebug
    }

    var opts = &slog.HandlerOptions{
        Level: level,
    }

    var handler slog.Handler

    if os.Getenv("LOG_FORMAT") == "json" {
        handler = slog.NewJSONHandler(os.Stdout, opts)
    } else {
        handler = slog.NewTextHandler(os.Stdout, opts)
    }
    logger = slog.New(handler)
}

func _log(level slog.Level, msg string, args ...any) {
    _, f, l, _ := runtime.Caller(2)
    group := slog.Group(
        "source",
        slog.Attr{
            Key:   "file",
            Value: slog.AnyValue(f),
        },
        slog.Attr{
            Key:   "line",
            Value: slog.AnyValue(l),
        },
    )
    args = append(args, group)
    logger.Log(context.Background(), level, msg, args...)
}

func Info(msg string, args ...any) {
    _log(slog.LevelInfo, msg, args...)
}

func Debug(msg string, args ...any) {
    _log(slog.LevelDebug, msg, args...)
}

func Error(msg string, args ...any) {
    _log(slog.LevelError, msg, args...)
}

func Warn(msg string, args ...any) {
    _log(slog.LevelWarn, msg, args...)
}

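A tiny usage sketch for the logger above (not part of the diff); the message and key/value pairs are illustrative:

package main

import "github.com/mudler/LocalAGI/pkg/xlog"

func main() {
    // LOG_LEVEL and LOG_FORMAT are read once at init; with
    // LOG_FORMAT=json the same calls emit structured JSON instead.
    xlog.Info("agent started", "name", "researcher", "actions", 4)
    xlog.Debug("raw payload", "bytes", 128)
}
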
72
pkg/xstrings/split.go
Normal file
72
pkg/xstrings/split.go
Normal file
@@ -0,0 +1,72 @@
package xstrings

import (
	"strings"
)

// SplitParagraph splits text into chunks of at most maxLength bytes,
// preserving complete words and special characters like newlines.
// Words longer than maxLength are kept whole rather than truncated.
// It indexes individual bytes, so word boundaries are only detected
// reliably in ASCII text.
func SplitParagraph(text string, maxLength int) []string {
	// Handle edge cases
	if maxLength <= 0 || len(text) == 0 {
		return []string{text}
	}

	var chunks []string
	remainingText := text

	for len(remainingText) > 0 {
		// If the remaining text fits in a chunk, add it and we're done
		if len(remainingText) <= maxLength {
			chunks = append(chunks, remainingText)
			break
		}

		// Try to find a good split point near the max length
		splitIndex := maxLength

		// Look backward from the max length to find a space or newline
		for splitIndex > 0 && !isWhitespace(rune(remainingText[splitIndex])) {
			splitIndex--
		}

		// If we couldn't find a good split point (no whitespace),
		// look forward for the next whitespace
		if splitIndex == 0 {
			splitIndex = maxLength
			// If we can't find whitespace forward, we'll have to split a word
			for splitIndex < len(remainingText) && !isWhitespace(rune(remainingText[splitIndex])) {
				splitIndex++
			}

			// If we still couldn't find whitespace, take the whole string
			if splitIndex == len(remainingText) {
				chunks = append(chunks, remainingText)
				break
			}
		}

		// Add the chunk up to the split point
		chunk := remainingText[:splitIndex]

		// Preserve trailing newlines with the current chunk
		if splitIndex < len(remainingText) && remainingText[splitIndex] == '\n' {
			chunk += string(remainingText[splitIndex])
			splitIndex++
		}

		chunks = append(chunks, chunk)

		// Remove leading whitespace from the next chunk
		remainingText = remainingText[splitIndex:]
		remainingText = strings.TrimLeftFunc(remainingText, isWhitespace)
	}

	return chunks
}

// isWhitespace reports whether r is a space, tab, newline, or carriage return.
func isWhitespace(r rune) bool {
	return r == ' ' || r == '\t' || r == '\n' || r == '\r'
}
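A short usage sketch (not part of the diff; the import path is taken from the test file below): splitting a long message into word-safe chunks, for example for a chat connector with a per-message size limit.

package main

import (
	"fmt"

	"github.com/mudler/LocalAGI/pkg/xstrings"
)

func main() {
	text := "This is a longer text that needs to be split into chunks."
	// Each chunk is at most 10 bytes and words are never cut in half.
	for i, chunk := range xstrings.SplitParagraph(text, 10) {
		fmt.Printf("chunk %d: %q\n", i, chunk)
	}
	// Prints: "This is a", "longer", "text that", "needs to",
	// "be split", "into", "chunks."
}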
79
pkg/xstrings/split_test.go
Normal file
@@ -0,0 +1,79 @@
package xstrings_test

import (
	"github.com/mudler/LocalAGI/pkg/xstrings"
	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

var _ = Describe("SplitParagraph", func() {
	It("should return the text as a single chunk if it's shorter than maxLen", func() {
		text := "Short text"
		maxLen := 20
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"Short text"}))
	})

	It("should split the text into chunks of maxLen without truncating words", func() {
		text := "This is a longer text that needs to be split into chunks."
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"This is a", "longer", "text that", "needs to", "be split", "into", "chunks."}))
	})

	It("should handle texts with multiple spaces and newlines correctly", func() {
		text := "This is\na\ntext with\n \nmultiple spaces  and\nnewlines."
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"This is\na\n", "text with\n", "multiple", "spaces ", "and\n", "newlines."}))
	})

	It("should handle a text with a single word longer than maxLen", func() {
		text := "supercalifragilisticexpialidocious"
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"supercalifragilisticexpialidocious"}))
	})

	It("should handle a text with empty lines", func() {
		text := "line1\n\nline2"
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"line1\n\n", "line2"}))
	})

	It("should handle a text with leading and trailing spaces", func() {
		text := "  leading spaces and trailing spaces  "
		maxLen := 15
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"  leading", "spaces and", "trailing spaces"}))
	})

	It("should handle a text with only spaces", func() {
		text := "     "
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"     "}))
	})

	It("should handle empty string", func() {
		text := ""
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{""}))
	})

	It("should handle a text with only newlines", func() {
		text := "\n\n\n"
		maxLen := 10
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"\n\n\n"}))
	})

	It("should handle a text with special characters", func() {
		text := "This is a text with special characters !@#$%^&*()"
		maxLen := 20
		result := xstrings.SplitParagraph(text, maxLen)
		Expect(result).To(Equal([]string{"This is a text with", "special characters", "!@#$%^&*()"}))
	})
})
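One note on running these specs: Ginkgo tests only execute when a standard Go test function hands control to RunSpecs. That bootstrap file is not shown in this diff; a conventional version (file and function names assumed) would look like:

package xstrings_test

import (
	"testing"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

// TestXstrings is the hypothetical suite entry point; `go test ./pkg/xstrings`
// would run every It block registered above through this single function.
func TestXstrings(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "xstrings test suite")
}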
15
pkg/xstrings/uniq.go
Normal file
@@ -0,0 +1,15 @@
package xstrings

// Comparable constrains UniqueSlice to types usable as map keys here.
type Comparable interface{ ~int | ~int64 | ~string }

// UniqueSlice returns a new slice with duplicate entries removed,
// preserving the order of first occurrence.
func UniqueSlice[T Comparable](s []T) []T {
	keys := make(map[T]bool)
	list := []T{}
	for _, entry := range s {
		if _, seen := keys[entry]; !seen {
			keys[entry] = true
			list = append(list, entry)
		}
	}
	return list
}
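A minimal usage sketch (not part of the diff; the variable names are illustrative): deduplicating while keeping first-occurrence order.

package main

import (
	"fmt"

	"github.com/mudler/LocalAGI/pkg/xstrings"
)

func main() {
	ids := []int64{3, 1, 3, 2, 1}
	fmt.Println(xstrings.UniqueSlice(ids)) // [3 1 2]

	names := []string{"alice", "bob", "alice"}
	fmt.Println(xstrings.UniqueSlice(names)) // [alice bob]
}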