Chat history tweaks, handlers priority fix, system prompt clarification #28

Merged
skobkin merged 2 commits from feature_chat_history into main 2024-10-27 23:06:45 +00:00
3 changed files with 34 additions and 13 deletions
Showing only changes of commit de5165f5ec - Show all commits

View file

@ -7,6 +7,12 @@ import (
const HistoryLength = 50
// Message is a single chat-history entry held in MessageRingBuffer.
type Message struct {
// Name is the display name of the message author (sender's first name, or the bot's username).
Name string
// Text is the raw message text.
Text string
// IsMe reports whether the bot itself produced this message (true for bot replies, false for user messages).
IsMe bool
}
type MessageRingBuffer struct {
messages []Message
capacity int
@ -31,11 +37,6 @@ func (b *MessageRingBuffer) GetAll() []Message {
return b.messages
}
type Message struct {
Name string
Text string
}
func (b *Bot) saveChatMessageToHistory(message *telego.Message) {
chatId := message.Chat.ID
@ -55,6 +56,7 @@ func (b *Bot) saveChatMessageToHistory(message *telego.Message) {
b.history[chatId].Push(Message{
Name: message.From.FirstName,
Text: message.Text,
IsMe: false,
})
}
@ -77,6 +79,7 @@ func (b *Bot) saveBotReplyToHistory(message *telego.Message, reply string) {
b.history[chatId].Push(Message{
Name: b.profile.Username,
Text: reply,
IsMe: true,
})
}

View file

@ -37,7 +37,7 @@ func (l *LlmConnector) HandleChatMessage(text string, model string, requestConte
historyLength := len(requestContext.Chat.History)
if historyLength > 0 {
systemPrompt += "\nYou have an access to last " + strconv.Itoa(historyLength) + "messages in this chat."
systemPrompt += "\nYou have access to the last " + strconv.Itoa(historyLength) + " messages in this chat."
}
req := openai.ChatCompletionRequest{
@ -52,9 +52,20 @@ func (l *LlmConnector) HandleChatMessage(text string, model string, requestConte
if historyLength > 0 {
for _, msg := range requestContext.Chat.History {
var msgRole string
var msgText string
if msg.IsMe {
msgRole = openai.ChatMessageRoleAssistant
msgText = msg.Text
} else {
msgRole = openai.ChatMessageRoleSystem
msgText = "User " + msg.Name + " said:\n" + msg.Text
}
req.Messages = append(req.Messages, openai.ChatCompletionMessage{
Role: openai.ChatMessageRoleUser,
Content: msg.Name + ":\n\n" + quoteMessage(msg.Text),
Role: msgRole,
Content: msgText,
})
}
}

View file

@ -23,6 +23,7 @@ type ChatContext struct {
// ChatMessage is one prior message from the chat history carried in the request context.
type ChatMessage struct {
// Name is the display name of the message author.
Name string
// Text is the message text.
Text string
// IsMe marks messages authored by the bot; such messages are replayed with the assistant role.
IsMe bool
}
func (c RequestContext) Prompt() string {
@ -34,14 +35,20 @@ func (c RequestContext) Prompt() string {
prompt += "The type of chat you're in is \"" + c.Chat.Type + "\". "
if c.Chat.Type == "group" || c.Chat.Type == "supergroup" {
prompt += "Please consider that there are several users in this chat type who may discuss several unrelated " +
"topics. Try to respond only about the topic you were asked about and only to the user who asked you, " +
"but keep the rest of the chat history in mind. "
}
if c.Chat.Title != "" {
prompt += "Chat is called \"" + c.Chat.Title + "\". "
prompt += "\nChat is called \"" + c.Chat.Title + "\". "
}
if c.Chat.Description != "" {
prompt += "Chat description is \"" + c.Chat.Description + "\". "
}
prompt += "Profile of the user who mentioned you in the chat:" +
prompt += "\nProfile of the user who mentioned you in the chat:\n" +
"First name: \"" + c.User.FirstName + "\"\n"
if c.User.Username != "" {
prompt += "Username: @" + c.User.Username + ".\n"
@ -49,9 +56,9 @@ func (c RequestContext) Prompt() string {
if c.User.LastName != "" {
prompt += "Last name: \"" + c.User.LastName + "\"\n"
}
if c.User.IsPremium {
prompt += "Telegram Premium subscription: active."
}
//if c.User.IsPremium {
// prompt += "Telegram Premium subscription: active."
//}
return prompt
}