Changing default model to LLaMa3 (#22)

Reviewed-on: #22

parent db92c01131
commit c4665338db
@@ -122,7 +122,7 @@ func (b *Bot) inlineHandler(bot *telego.Bot, update telego.Update) {
 		slog.Error("Cannot retrieve an article using extractor", "error", err)
 	}
 
-	llmReply, err := b.llm.Summarize(article.Text, llm.ModelMistralUncensored)
+	llmReply, err := b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored)
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -148,7 +148,7 @@ func (b *Bot) inlineHandler(bot *telego.Bot, update telego.Update) {
 	requestContext := createLlmRequestContextFromUpdate(update)
 
-	llmReply, err := b.llm.HandleSingleRequest(iq.Query, llm.ModelMistralUncensored, requestContext)
+	llmReply, err := b.llm.HandleSingleRequest(iq.Query, llm.ModelLlama3Uncensored, requestContext)
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -194,7 +194,7 @@ func (b *Bot) heyHandler(bot *telego.Bot, update telego.Update) {
 	requestContext := createLlmRequestContextFromUpdate(update)
 
-	llmReply, err := b.llm.HandleSingleRequest(userMessage, llm.ModelMistralUncensored, requestContext)
+	llmReply, err := b.llm.HandleSingleRequest(userMessage, llm.ModelLlama3Uncensored, requestContext)
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -259,7 +259,7 @@ func (b *Bot) summarizeHandler(bot *telego.Bot, update telego.Update) {
 		slog.Error("Cannot retrieve an article using extractor", "error", err)
 	}
 
-	llmReply, err := b.llm.Summarize(article.Text, llm.ModelMistralUncensored)
+	llmReply, err := b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored)
 	if err != nil {
 		slog.Error("Cannot get reply from LLM connector")
@@ -11,7 +11,8 @@ var (
 	ErrLlmBackendRequestFailed = errors.New("llm back-end request failed")
 	ErrNoChoices               = errors.New("no choices in LLM response")
 
-	ModelMistralUncensored = "dolphin-mistral"
+	ModelMistralUncensored = "dolphin-mistral:7b-v2.8-q4_K_M"
+	ModelLlama3Uncensored  = "dolphin-llama3:8b-v2.9-q4_K_M"
 )
 
 type LlmConnector struct {
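For readers skimming the diff: both model tags follow Ollama's model:size-version-quantization naming, so the constants above are almost certainly Ollama tags. Below is a minimal, self-contained Go sketch of how such a constant could travel from a call site like b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored) to an Ollama-compatible back-end. Only the constants and the Summarize signature are taken from this commit; the endpoint, prompt wording, struct fields, and error handling are illustrative assumptions, not this repository's actual LlmConnector.

// Sketch only: the real LlmConnector in this repo may differ. The model tag
// is passed as a plain string, which is why the default-model change above
// is just a constant swap at each call site.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

const (
	ModelMistralUncensored = "dolphin-mistral:7b-v2.8-q4_K_M"
	ModelLlama3Uncensored  = "dolphin-llama3:8b-v2.9-q4_K_M"
)

type LlmConnector struct {
	baseURL string // assumed: an Ollama server, e.g. http://localhost:11434
}

// Summarize mirrors the call shape seen in the hunks above:
// b.llm.Summarize(article.Text, llm.ModelLlama3Uncensored).
func (l *LlmConnector) Summarize(text string, model string) (string, error) {
	// Request body for Ollama's /api/generate endpoint (assumed back-end).
	body, err := json.Marshal(map[string]any{
		"model":  model, // tag must already be pulled on the host
		"prompt": "Summarize the following text:\n\n" + text,
		"stream": false,
	})
	if err != nil {
		return "", err
	}
	resp, err := http.Post(l.baseURL+"/api/generate", "application/json", bytes.NewReader(body))
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	var out struct {
		Response string `json:"response"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return "", err
	}
	return out.Response, nil
}

func main() {
	llm := &LlmConnector{baseURL: "http://localhost:11434"}
	reply, err := llm.Summarize("Some article text…", ModelLlama3Uncensored)
	if err != nil {
		fmt.Println("Cannot get reply from LLM connector:", err)
		return
	}
	fmt.Println(reply)
}

Whatever the real connector does, the host serving the bot needs the new tag available (with Ollama: ollama pull dolphin-llama3:8b-v2.9-q4_K_M); otherwise every request would fail at the back-end, presumably surfacing as ErrLlmBackendRequestFailed.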