package llm

import (
    "context"
    "errors"
    "log/slog"
    "strconv"
    "strings"

    "github.com/sashabaranov/go-openai"
)
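
// Errors reported to callers when the LLM back-end request cannot be completed.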
var (
    ErrLlmBackendRequestFailed = errors.New("llm back-end request failed")
    ErrNoChoices               = errors.New("no choices in LLM response")
)
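
// LlmConnector is a thin wrapper around an OpenAI-compatible chat completion client.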
type LlmConnector struct {
    client *openai.Client
}
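
// NewConnector builds an LlmConnector for an OpenAI-compatible API served at
// baseUrl, authenticating with token. A minimal usage sketch (the URL and
// model ID below are illustrative, not part of this package):
//
//	llm := NewConnector("http://localhost:11434/v1", "dummy-token")
//	summary, err := llm.Summarize("some long text", "some-model-id")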
func NewConnector(baseUrl string, token string) *LlmConnector {
    config := openai.DefaultConfig(token)
    config.BaseURL = baseUrl

    client := openai.NewClientWithConfig(config)

    return &LlmConnector{
        client: client,
    }
}
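
// HandleChatMessage sends a user's chat message to the configured model,
// prepending a system prompt built from the request context and, when
// available, recent chat history.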
func (l *LlmConnector) HandleChatMessage(text string, model string, requestContext RequestContext) (string, error) {
    systemPrompt := "You're a bot in the Telegram chat.\n" +
        "You're using a free model called \"" + model + "\".\n\n" +
        requestContext.Prompt()

    historyLength := len(requestContext.Chat.History)

    if historyLength > 0 {
        systemPrompt += "\nYou have access to the last " + strconv.Itoa(historyLength) + " messages in this chat."
    }

    req := openai.ChatCompletionRequest{
        Model: model,
        Messages: []openai.ChatCompletionMessage{
            {
                Role:    openai.ChatMessageRoleSystem,
                Content: systemPrompt,
            },
        },
    }

    if historyLength > 0 {
        // Replay chat history: the bot's own messages go in as assistant
        // turns, other users' messages as attributed system notes.
        for _, msg := range requestContext.Chat.History {
            var msgRole string
            var msgText string

            if msg.IsMe {
                msgRole = openai.ChatMessageRoleAssistant
                msgText = msg.Text
            } else {
                msgRole = openai.ChatMessageRoleSystem
                msgText = "User " + msg.Name + " said:\n" + msg.Text
            }

            req.Messages = append(req.Messages, openai.ChatCompletionMessage{
                Role:    msgRole,
                Content: msgText,
            })
        }
    }

    req.Messages = append(req.Messages, openai.ChatCompletionMessage{
        Role:    openai.ChatMessageRoleUser,
        Content: text,
    })

    resp, err := l.client.CreateChatCompletion(context.Background(), req)
    if err != nil {
        slog.Error("llm: LLM back-end request failed", "error", err)

        return "", ErrLlmBackendRequestFailed
    }

    slog.Debug("llm: Received LLM back-end response", "response", resp)

    if len(resp.Choices) < 1 {
        slog.Error("llm: LLM back-end reply has no choices")

        return "", ErrNoChoices
    }

    return resp.Choices[0].Message.Content, nil
}
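
// Summarize asks the model to compress text into a short bullet-point list
// of the main facts.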
func (l *LlmConnector) Summarize(text string, model string) (string, error) {
    req := openai.ChatCompletionRequest{
        Model: model,
        Messages: []openai.ChatCompletionMessage{
            {
                Role: openai.ChatMessageRoleSystem,
                Content: "You're a text shortener. Give a very brief summary of the main facts " +
                    "point by point. Format them as a list of bullet points each starting with \"-\". " +
                    "Avoid any commentaries and value judgement on the matter. " +
                    "If possible, respond in the same language as the original text. " +
                    "Do not use any non-ASCII characters.",
            },
        },
    }

    req.Messages = append(req.Messages, openai.ChatCompletionMessage{
        Role:    openai.ChatMessageRoleUser,
        Content: text,
    })

    resp, err := l.client.CreateChatCompletion(context.Background(), req)
    if err != nil {
        slog.Error("llm: LLM back-end request failed", "error", err)

        return "", ErrLlmBackendRequestFailed
    }

    slog.Debug("llm: Received LLM back-end response", "response", resp)

    if len(resp.Choices) < 1 {
        slog.Error("llm: LLM back-end reply has no choices")

        return "", ErrNoChoices
    }

    return resp.Choices[0].Message.Content, nil
}
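
// GetModels lists the model IDs available from the back-end. On request
// failure it logs the error and returns an empty slice.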
func (l *LlmConnector) GetModels() []string {
    var result []string

    models, err := l.client.ListModels(context.Background())
    if err != nil {
        slog.Error("llm: Model list request failed", "error", err)

        return result
    }

    slog.Info("llm: Model list retrieved", "models", models)

    for _, model := range models.Models {
        result = append(result, model.ID)
    }

    return result
}
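
// HasModel reports whether the back-end knows a model with the given ID.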
func (l *LlmConnector) HasModel(id string) bool {
    model, err := l.client.GetModel(context.Background(), id)
    if err != nil {
        slog.Error("llm: Model request failed", "error", err)

        return false
    }

    slog.Debug("llm: Returned model", "model", model)

    return model.ID != ""
}
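
// quoteMessage prefixes every line of text with "> ", turning it into a
// Markdown-style block quote, e.g. "a\nb" becomes "> a\n> b".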
func quoteMessage(text string) string {
    return "> " + strings.ReplaceAll(text, "\n", "\n> ")
}