From c4665338db6cc40ccd2e1f6a47a1abf6a5e4bb45 Mon Sep 17 00:00:00 2001
From: Alexey Skobkin
Date: Mon, 6 May 2024 00:15:36 +0000
Subject: [PATCH] Changing default model to LLaMa3 (#22)

Changing default model to LLaMa3

Reviewed-on: https://git.skobk.in/skobkin/telegram-ollama-reply-bot/pulls/22
---
 bot/bot.go | 8 ++++----
 llm/llm.go | 3 ++-
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/bot/bot.go b/bot/bot.go
index 0d6601d..828819b 100644
--- a/bot/bot.go
+++ b/bot/bot.go
@@ -122,7 +122,7 @@ func (b *Bot) inlineHandler(bot *telego.Bot, update telego.Update) {
 		slog.Error("Cannot retrieve an article using extractor", "error", err)
 	}
 
-	llmReply, err := b.llm.Summarize(article.Text, llm.ModelMistralUncensored)
+	llmReply, err := b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored)
 
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -148,7 +148,7 @@ func (b *Bot) inlineHandler(bot *telego.Bot, update telego.Update) {
 
 	requestContext := createLlmRequestContextFromUpdate(update)
 
-	llmReply, err := b.llm.HandleSingleRequest(iq.Query, llm.ModelMistralUncensored, requestContext)
+	llmReply, err := b.llm.HandleSingleRequest(iq.Query, llm.ModelLlama3Uncensored, requestContext)
 
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -194,7 +194,7 @@ func (b *Bot) heyHandler(bot *telego.Bot, update telego.Update) {
 
 	requestContext := createLlmRequestContextFromUpdate(update)
 
-	llmReply, err := b.llm.HandleSingleRequest(userMessage, llm.ModelMistralUncensored, requestContext)
+	llmReply, err := b.llm.HandleSingleRequest(userMessage, llm.ModelLlama3Uncensored, requestContext)
 
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -259,7 +259,7 @@ func (b *Bot) summarizeHandler(bot *telego.Bot, update telego.Update) {
 		slog.Error("Cannot retrieve an article using extractor", "error", err)
 	}
 
-	llmReply, err := b.llm.Summarize(article.Text, llm.ModelMistralUncensored)
+	llmReply, err := b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored)
 
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
diff --git a/llm/llm.go b/llm/llm.go
index 6b2d2c5..73affe8 100644
--- a/llm/llm.go
+++ b/llm/llm.go
@@ -11,7 +11,8 @@ var (
 	ErrLlmBackendRequestFailed = errors.New("llm back-end request failed")
 	ErrNoChoices               = errors.New("no choices in LLM response")
 
-	ModelMistralUncensored = "dolphin-mistral"
+	ModelMistralUncensored = "dolphin-mistral:7b-v2.8-q4_K_M"
+	ModelLlama3Uncensored  = "dolphin-llama3:8b-v2.9-q4_K_M"
 )
 
 type LlmConnector struct {