telegram-ollama-reply-bot/bot/request_context.go
Alexey Skobkin f28670834d
All checks were successful
continuous-integration/drone/push Build is passing
continuous-integration/drone/tag Build is passing
Model configuration and small prompt improvements (#24)
- Model can be configured using ENV
- Summarization prompt improved
- Single text request prompt improved
- Chat context prompt improved

Co-authored-by: Alexey Skobkin <skobkin-ru@ya.ru>
Reviewed-on: #24
2024-08-16 00:47:07 +00:00

61 lines
1.1 KiB
Go

package bot
import (
"github.com/mymmrac/telego"
"log/slog"
"telegram-ollama-reply-bot/llm"
)
// createLlmRequestContextFromUpdate builds an llm.RequestContext from an
// incoming Telegram update. Regular messages and inline queries are
// supported; any other update kind produces an empty context. User data is
// filled when a sender is present, and chat data only for non-inline updates.
func createLlmRequestContextFromUpdate(update telego.Update) llm.RequestContext {
	rc := llm.RequestContext{
		Empty:  true,
		Inline: false,
	}

	msg := update.Message
	inlineQuery := update.InlineQuery

	// Nothing usable in this update: bail out with the empty context.
	if msg == nil && inlineQuery == nil {
		slog.Debug("request context creation problem: no message provided. returning empty context.", "request-context", rc)
		return rc
	}

	rc.Empty = false
	rc.Inline = inlineQuery != nil

	// Pick the sender: inline queries always carry a From value; a message's
	// From may be nil (e.g. channel posts), hence the pointer.
	var sender *telego.User
	if inlineQuery != nil {
		sender = &inlineQuery.From
	} else {
		sender = msg.From
	}

	if sender != nil {
		rc.User = llm.UserContext{
			Username:  sender.Username,
			FirstName: sender.FirstName,
			LastName:  sender.LastName,
			IsPremium: sender.IsPremium,
		}
	}

	// Chat context only exists for real messages; inline queries have none.
	if !rc.Inline {
		// TODO: implement retrieval of chat description
		chat := msg.Chat
		rc.Chat = llm.ChatContext{
			Title: chat.Title,
			// TODO: fill when ChatFullInfo retrieved
			//Description: chat.Description,
			Type: chat.Type,
		}
	}

	slog.Debug("request context created", "request-context", rc)

	return rc
}