diff --git a/bot/bot.go b/bot/bot.go index c9b3d01..a0282ca 100644 --- a/bot/bot.go +++ b/bot/bot.go @@ -30,7 +30,7 @@ type Bot struct { extractor *extractor.Extractor stats *stats.Stats models ModelSelection - history map[int64]*MessageRingBuffer + history map[int64]*MessageHistory profile BotInfo markdownV1Replacer *strings.Replacer @@ -48,7 +48,7 @@ func NewBot( extractor: extractor, stats: stats.NewStats(), models: models, - history: make(map[int64]*MessageRingBuffer), + history: make(map[int64]*MessageHistory), profile: BotInfo{0, "", ""}, markdownV1Replacer: strings.NewReplacer( @@ -129,7 +129,7 @@ func (b *Bot) textMessageHandler(bot *telego.Bot, update telego.Update) { slog.Info("/any-message", "type", "private") b.processMention(message) default: - slog.Debug("/any-message", "info", "Message is not mention, reply or private chat. Skipping.") + slog.Debug("/any-message", "info", "MessageData is not mention, reply or private chat. Skipping.") } } @@ -144,7 +144,13 @@ func (b *Bot) processMention(message *telego.Message) { requestContext := b.createLlmRequestContextFromMessage(message) - llmReply, err := b.llm.HandleChatMessage(message.Text, b.models.TextRequestModel, requestContext) + userMessageData := tgUserMessageToMessageData(message) + + llmReply, err := b.llm.HandleChatMessage( + messageDataToLlmMessage(userMessageData), + b.models.TextRequestModel, + requestContext, + ) if err != nil { slog.Error("Cannot get reply from LLM connector") @@ -227,9 +233,11 @@ func (b *Bot) summarizeHandler(bot *telego.Bot, update telego.Update) { slog.Debug("Got completion. 
Going to send.", "llm-completion", llmReply) + replyMarkdown := b.escapeMarkdownV1Symbols(llmReply) + message := tu.Message( chatID, - b.escapeMarkdownV1Symbols(llmReply), + replyMarkdown, ).WithParseMode("Markdown") _, err = bot.SendMessage(b.reply(update.Message, message)) @@ -239,6 +247,8 @@ func (b *Bot) summarizeHandler(bot *telego.Bot, update telego.Update) { b.trySendReplyError(update.Message) } + + b.saveBotReplyToHistory(update.Message, replyMarkdown) } func (b *Bot) helpHandler(bot *telego.Bot, update telego.Update) { diff --git a/bot/message_history.go b/bot/message_history.go index 781e518..e58ee2d 100644 --- a/bot/message_history.go +++ b/bot/message_history.go @@ -7,25 +7,28 @@ import ( const HistoryLength = 150 -type Message struct { - Name string - Text string - IsMe bool +type MessageData struct { + Name string + Username string + Text string + IsMe bool + IsUserRequest bool + ReplyTo *MessageData } -type MessageRingBuffer struct { - messages []Message +type MessageHistory struct { + messages []MessageData capacity int } -func NewMessageBuffer(capacity int) *MessageRingBuffer { - return &MessageRingBuffer{ - messages: make([]Message, 0, capacity), +func NewMessageHistory(capacity int) *MessageHistory { + return &MessageHistory{ + messages: make([]MessageData, 0, capacity), capacity: capacity, } } -func (b *MessageRingBuffer) Push(element Message) { +func (b *MessageHistory) Push(element MessageData) { if len(b.messages) >= b.capacity { b.messages = b.messages[1:] } @@ -33,7 +36,7 @@ func (b *MessageRingBuffer) Push(element Message) { b.messages = append(b.messages, element) } -func (b *MessageRingBuffer) GetAll() []Message { +func (b *MessageHistory) GetAll() []MessageData { return b.messages } @@ -50,43 +53,72 @@ func (b *Bot) saveChatMessageToHistory(message *telego.Message) { _, ok := b.history[chatId] if !ok { - b.history[chatId] = NewMessageBuffer(HistoryLength) + b.history[chatId] = NewMessageHistory(HistoryLength) } - 
b.history[chatId].Push(Message{ - Name: message.From.FirstName, - Text: message.Text, - IsMe: false, - }) + msgData := tgUserMessageToMessageData(message) + + b.history[chatId].Push(msgData) } -func (b *Bot) saveBotReplyToHistory(message *telego.Message, reply string) { - chatId := message.Chat.ID +func (b *Bot) saveBotReplyToHistory(replyTo *telego.Message, text string) { + chatId := replyTo.Chat.ID slog.Info( "history-reply-save", "chat", chatId, - "to_id", message.From.ID, - "to_name", message.From.FirstName, - "text", reply, + "to_id", replyTo.From.ID, + "to_name", replyTo.From.FirstName, + "text", text, ) _, ok := b.history[chatId] if !ok { - b.history[chatId] = NewMessageBuffer(HistoryLength) + b.history[chatId] = NewMessageHistory(HistoryLength) } - b.history[chatId].Push(Message{ - Name: b.profile.Username, - Text: reply, - IsMe: true, - }) + msgData := MessageData{ + Name: b.profile.Name, + Username: b.profile.Username, + Text: text, + IsMe: true, + } + + if replyTo.ReplyToMessage != nil { + replyMessage := replyTo.ReplyToMessage + + msgData.ReplyTo = &MessageData{ + Name: replyMessage.From.FirstName, + Username: replyMessage.From.Username, + Text: replyMessage.Text, + IsMe: false, + ReplyTo: nil, + } + } + + b.history[chatId].Push(msgData) } -func (b *Bot) getChatHistory(chatId int64) []Message { +func tgUserMessageToMessageData(message *telego.Message) MessageData { + msgData := MessageData{ + Name: message.From.FirstName, + Username: message.From.Username, + Text: message.Text, + IsMe: false, + } + + if message.ReplyToMessage != nil { + replyData := tgUserMessageToMessageData(message.ReplyToMessage) + msgData.ReplyTo = &replyData + } + + return msgData +} + +func (b *Bot) getChatHistory(chatId int64) []MessageData { _, ok := b.history[chatId] if !ok { - return make([]Message, 0) + return make([]MessageData, 0) } return b.history[chatId].GetAll() diff --git a/bot/request_context.go b/bot/request_context.go index a045bef..24999ec 100644 --- 
a/bot/request_context.go +++ b/bot/request_context.go @@ -48,17 +48,14 @@ func (b *Bot) createLlmRequestContextFromMessage(message *telego.Message) llm.Re return rc } -func historyToLlmMessages(history []Message) []llm.ChatMessage { +func historyToLlmMessages(history []MessageData) []llm.ChatMessage { length := len(history) if length > 0 { result := make([]llm.ChatMessage, 0, length) for _, msg := range history { - result = append(result, llm.ChatMessage{ - Name: msg.Name, - Text: msg.Text, - }) + result = append(result, messageDataToLlmMessage(msg)) } return result @@ -66,3 +63,20 @@ func historyToLlmMessages(history []Message) []llm.ChatMessage { return make([]llm.ChatMessage, 0) } + +func messageDataToLlmMessage(data MessageData) llm.ChatMessage { + llmMessage := llm.ChatMessage{ + Name: data.Name, + Username: data.Username, + Text: data.Text, + IsMe: data.IsMe, + IsUserRequest: data.IsUserRequest, + } + + if data.ReplyTo != nil { + replyMessage := messageDataToLlmMessage(*data.ReplyTo) + llmMessage.ReplyTo = &replyMessage + } + + return llmMessage +} diff --git a/llm/llm.go b/llm/llm.go index b6517ef..0cb68a8 100644 --- a/llm/llm.go +++ b/llm/llm.go @@ -6,7 +6,6 @@ import ( "github.com/sashabaranov/go-openai" "log/slog" "strconv" - "strings" ) var ( @@ -29,7 +28,7 @@ func NewConnector(baseUrl string, token string) *LlmConnector { } } -func (l *LlmConnector) HandleChatMessage(text string, model string, requestContext RequestContext) (string, error) { +func (l *LlmConnector) HandleChatMessage(userMessage ChatMessage, model string, requestContext RequestContext) (string, error) { systemPrompt := "You're a bot in the Telegram chat.\n" + "You're using a free model called \"" + model + "\".\n\n" + requestContext.Prompt() @@ -52,28 +51,11 @@ func (l *LlmConnector) HandleChatMessage(text string, model string, requestConte if historyLength > 0 { for _, msg := range requestContext.Chat.History { - var msgRole string - var msgText string - - if msg.IsMe { - msgRole = 
openai.ChatMessageRoleAssistant - msgText = msg.Text - } else { - msgRole = openai.ChatMessageRoleSystem - msgText = "User " + msg.Name + " said:\n" + msg.Text - } - - req.Messages = append(req.Messages, openai.ChatCompletionMessage{ - Role: msgRole, - Content: msgText, - }) + req.Messages = append(req.Messages, chatMessageToOpenAiChatCompletionMessage(msg)) } } - req.Messages = append(req.Messages, openai.ChatCompletionMessage{ - Role: openai.ChatMessageRoleUser, - Content: text, - }) + req.Messages = append(req.Messages, chatMessageToOpenAiChatCompletionMessage(userMessage)) resp, err := l.client.CreateChatCompletion(context.Background(), req) if err != nil { @@ -164,7 +146,3 @@ func (l *LlmConnector) HasModel(id string) bool { return false } - -func quoteMessage(text string) string { - return "> " + strings.ReplaceAll(text, "\n", "\n> ") -} diff --git a/llm/request_context.go b/llm/request_context.go index 518173c..05dd1df 100644 --- a/llm/request_context.go +++ b/llm/request_context.go @@ -1,5 +1,10 @@ package llm +import ( + "github.com/sashabaranov/go-openai" + "strings" +) + type RequestContext struct { Empty bool User UserContext @@ -21,9 +26,12 @@ type ChatContext struct { } type ChatMessage struct { - Name string - Text string - IsMe bool + Name string + Username string + Text string + IsMe bool + IsUserRequest bool + ReplyTo *ChatMessage } func (c RequestContext) Prompt() string { @@ -62,3 +70,54 @@ func (c RequestContext) Prompt() string { return prompt } + +func chatMessageToOpenAiChatCompletionMessage(message ChatMessage) openai.ChatCompletionMessage { + var msgRole string + var msgText string + + switch { + case message.IsMe: + msgRole = openai.ChatMessageRoleAssistant + case message.IsUserRequest: + msgRole = openai.ChatMessageRoleUser + default: + msgRole = openai.ChatMessageRoleSystem + } + + if message.IsMe { + msgText = message.Text + } else { + msgText = chatMessageToText(message) + } + + return openai.ChatCompletionMessage{ + Role: msgRole, + 
Content: msgText, + } +} + +func chatMessageToText(message ChatMessage) string { + var msgText string + + if message.ReplyTo != nil { + msgText += "In reply to:\n" + msgText += quoteText(presentUserMessageAsText(*message.ReplyTo)) + "\n\n" + } + msgText += presentUserMessageAsText(message) + + return msgText +} + +func presentUserMessageAsText(message ChatMessage) string { + result := message.Name + if message.Username != "" { + result += " (@" + message.Username + ")" + } + result += " wrote:\n" + message.Text + + return result +} + +func quoteText(text string) string { + return "> " + strings.ReplaceAll(text, "\n", "\n> ") +}