Presenting chat history as 'system' messages. Presenting bot replies as 'assistant' messages. Tweaking system prompt.
parent edf2158d29
commit de5165f5ec
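In short: every stored history entry now carries an IsMe flag, and that flag picks the role when the OpenAI request is built. The bot's own past replies are replayed as assistant messages, while other users' messages are wrapped as system messages that name their author. The helper below is only an illustrative sketch of that mapping (it is not part of this commit; the real logic sits inline in HandleChatMessage in llm/llm.go, as the diff shows):

	// Illustrative sketch only, not code from this commit. It mirrors the
	// role-selection logic added to llm/llm.go, using the ChatMessage type
	// from the request context and the go-openai role constants.
	func historyEntryToOpenAI(msg ChatMessage) openai.ChatCompletionMessage {
		if msg.IsMe {
			// The bot's own earlier replies go back to the model as assistant turns.
			return openai.ChatCompletionMessage{
				Role:    openai.ChatMessageRoleAssistant,
				Content: msg.Text,
			}
		}
		// Everyone else's messages are presented as system messages naming the author.
		return openai.ChatCompletionMessage{
			Role:    openai.ChatMessageRoleSystem,
			Content: "User " + msg.Name + " said:\n" + msg.Text,
		}
	}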
@@ -7,6 +7,12 @@ import (
 
 const HistoryLength = 50
 
+type Message struct {
+	Name string
+	Text string
+	IsMe bool
+}
+
 type MessageRingBuffer struct {
 	messages []Message
 	capacity int
@@ -31,11 +37,6 @@ func (b *MessageRingBuffer) GetAll() []Message {
 	return b.messages
 }
 
-type Message struct {
-	Name string
-	Text string
-}
-
 func (b *Bot) saveChatMessageToHistory(message *telego.Message) {
 	chatId := message.Chat.ID
 
@@ -55,6 +56,7 @@ func (b *Bot) saveChatMessageToHistory(message *telego.Message) {
 	b.history[chatId].Push(Message{
 		Name: message.From.FirstName,
 		Text: message.Text,
+		IsMe: false,
 	})
 }
@@ -77,6 +79,7 @@ func (b *Bot) saveBotReplyToHistory(message *telego.Message, reply string) {
 	b.history[chatId].Push(Message{
 		Name: b.profile.Username,
 		Text: reply,
+		IsMe: true,
 	})
 }
llm/llm.go (17 changed lines)
@@ -37,7 +37,7 @@ func (l *LlmConnector) HandleChatMessage(text string, model string, requestConte
 	historyLength := len(requestContext.Chat.History)
 
 	if historyLength > 0 {
-		systemPrompt += "\nYou have an access to last " + strconv.Itoa(historyLength) + "messages in this chat."
+		systemPrompt += "\nYou have access to last " + strconv.Itoa(historyLength) + "messages in this chat."
 	}
 
 	req := openai.ChatCompletionRequest{
@@ -52,9 +52,20 @@ func (l *LlmConnector) HandleChatMessage(text string, model string, requestConte
 
 	if historyLength > 0 {
 		for _, msg := range requestContext.Chat.History {
+			var msgRole string
+			var msgText string
+
+			if msg.IsMe {
+				msgRole = openai.ChatMessageRoleAssistant
+				msgText = msg.Text
+			} else {
+				msgRole = openai.ChatMessageRoleSystem
+				msgText = "User " + msg.Name + " said:\n" + msg.Text
+			}
+
 			req.Messages = append(req.Messages, openai.ChatCompletionMessage{
-				Role:    openai.ChatMessageRoleUser,
-				Content: msg.Name + ":\n\n" + quoteMessage(msg.Text),
+				Role:    msgRole,
+				Content: msgText,
 			})
 		}
 	}
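For a concrete picture of what the loop above now produces (hypothetical history entries, names invented for illustration):

	// Hypothetical two-entry history, for illustration only.
	history := []ChatMessage{
		{Name: "Alice", Text: "When is the next release?", IsMe: false},
		{Name: "examplebot", Text: "The release is planned for Friday.", IsMe: true},
	}
	// After the loop runs over this history, req.Messages gains, in order:
	//   {Role: openai.ChatMessageRoleSystem,    Content: "User Alice said:\nWhen is the next release?"}
	//   {Role: openai.ChatMessageRoleAssistant, Content: "The release is planned for Friday."}
	_ = history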
@@ -23,6 +23,7 @@ type ChatContext struct {
 type ChatMessage struct {
 	Name string
 	Text string
+	IsMe bool
 }
 
 func (c RequestContext) Prompt() string {
@@ -34,14 +35,20 @@ func (c RequestContext) Prompt() string {
 
 	prompt += "The type of chat you're in is \"" + c.Chat.Type + "\". "
 
+	if c.Chat.Type == "group" || c.Chat.Type == "supergroup" {
+		prompt += "Please consider that there are several users in this chat type who may discuss several unrelated " +
+			"topics. Try to respond only about the topic you were asked about and only to the user who asked you, " +
+			"but keep in mind another chat history. "
+	}
+
 	if c.Chat.Title != "" {
-		prompt += "Chat is called \"" + c.Chat.Title + "\". "
+		prompt += "\nChat is called \"" + c.Chat.Title + "\". "
 	}
 	if c.Chat.Description != "" {
 		prompt += "Chat description is \"" + c.Chat.Description + "\". "
 	}
 
-	prompt += "Profile of the user who mentioned you in the chat:" +
+	prompt += "\nProfile of the user who mentioned you in the chat:" +
 		"First name: \"" + c.User.FirstName + "\"\n"
 	if c.User.Username != "" {
 		prompt += "Username: @" + c.User.Username + ".\n"
@@ -49,9 +56,9 @@ func (c RequestContext) Prompt() string {
 	if c.User.LastName != "" {
 		prompt += "Last name: \"" + c.User.LastName + "\"\n"
 	}
-	if c.User.IsPremium {
-		prompt += "Telegram Premium subscription: active."
-	}
+	//if c.User.IsPremium {
+	//	prompt += "Telegram Premium subscription: active."
+	//}
 
 	return prompt
 }
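To illustrate the prompt tweaks (hypothetical chat and user, not captured output), a supergroup titled "Dev Chat" where a user Alice (@alice) mentions the bot would now get a prompt fragment roughly like:

	... The type of chat you're in is "supergroup". Please consider that there are several users in this chat type who may discuss several unrelated topics. Try to respond only about the topic you were asked about and only to the user who asked you, but keep in mind another chat history. 
	Chat is called "Dev Chat". 
	Profile of the user who mentioned you in the chat:First name: "Alice"
	Username: @alice.

The "\n" prefixes added to the "Chat is called" and "Profile" fragments put them on their own lines, and the Telegram Premium line is no longer emitted because the IsPremium block is commented out.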