Model configuration and small prompt improvements #24

Merged
skobkin merged 8 commits from feature_model_configuration into main 2024-08-16 00:47:07 +00:00
2 changed files with 9 additions and 8 deletions
Showing only changes of commit 3fdf7fe045


@@ -28,12 +28,12 @@ func NewConnector(baseUrl string, token string) *LlmConnector {
 }
 
 func (l *LlmConnector) HandleSingleRequest(text string, model string, requestContext RequestContext) (string, error) {
-	systemPrompt := "You're a bot in the Telegram chat. " +
-		"You're using a free model called \"" + model + "\". " +
-		"You see only messages addressed to you using commands due to privacy settings."
+	systemPrompt := "You're a bot in the Telegram chat.\n" +
+		"You're using a free model called \"" + model + "\".\n" +
+		"Currently you're not able to access chat history, so each message will be replied from a clean slate."
 
 	if !requestContext.Empty {
-		systemPrompt += " " + requestContext.Prompt()
+		systemPrompt += "\n" + requestContext.Prompt()
 	}
 
 	req := openai.ChatCompletionRequest{

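For reference, a minimal standalone sketch of the system prompt this commit now produces; the package main wrapper and the model name "gpt-4o-mini" are illustrative assumptions, not part of this PR:

package main

import "fmt"

func main() {
	// Hypothetical model name, only used to render the prompt template.
	model := "gpt-4o-mini"

	// Same concatenation as HandleSingleRequest after this commit:
	// the sentences are now separated by newlines instead of spaces.
	systemPrompt := "You're a bot in the Telegram chat.\n" +
		"You're using a free model called \"" + model + "\".\n" +
		"Currently you're not able to access chat history, so each message will be replied from a clean slate."

	fmt.Println(systemPrompt)
}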

@@ -39,15 +39,16 @@ func (c RequestContext) Prompt() string {
 		prompt += "You're responding to inline query, so you're not in the chat right now. "
 	}
 
-	prompt += "According to their profile, first name of the user who wrote you is \"" + c.User.FirstName + "\". "
+	prompt += "User profile:" +
+		"First name: \"" + c.User.FirstName + "\"\n"
 	if c.User.Username != "" {
-		prompt += "Their username is @" + c.User.Username + ". "
+		prompt += "Username: @" + c.User.Username + ".\n"
 	}
 	if c.User.LastName != "" {
-		prompt += "Their last name is \"" + c.User.LastName + "\". "
+		prompt += "Last name: \"" + c.User.LastName + "\"\n"
 	}
 	if c.User.IsPremium {
-		prompt += "They have Telegram Premium subscription. "
+		prompt += "Telegram Premium subscription: active."
 	}
 
 	return prompt
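A minimal sketch of the profile block this formatting produces; the User struct and the sample values below are simplified stand-ins for the bot's RequestContext, not the real types:

package main

import "fmt"

// Simplified stand-in for the bot's user data, for illustration only.
type User struct {
	FirstName string
	LastName  string
	Username  string
	IsPremium bool
}

func main() {
	u := User{FirstName: "Alice", LastName: "Liddell", Username: "alice", IsPremium: true}

	// Same formatting as RequestContext.Prompt() after this commit:
	// the profile is rendered as key/value lines instead of full sentences.
	// Note that "User profile:" has no separator before "First name",
	// so both end up on the same output line.
	prompt := "User profile:" +
		"First name: \"" + u.FirstName + "\"\n"
	if u.Username != "" {
		prompt += "Username: @" + u.Username + ".\n"
	}
	if u.LastName != "" {
		prompt += "Last name: \"" + u.LastName + "\"\n"
	}
	if u.IsPremium {
		prompt += "Telegram Premium subscription: active."
	}

	fmt.Println(prompt)
}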