Updating deps, fixing model existence checking for prefixed models #43
llm/llm.go (48 changes)

```diff
@@ -5,6 +5,7 @@ import (
 	"errors"
 	"github.com/sashabaranov/go-openai"
 	"log/slog"
+	"slices"
 	"strconv"
 )
 
@@ -113,36 +114,33 @@ func (l *LlmConnector) Summarize(text string, model string) (string, error) {
 	return resp.Choices[0].Message.Content, nil
 }
 
-func (l *LlmConnector) GetModels() []string {
-	var result []string
-
-	models, err := l.client.ListModels(context.Background())
+func (l *LlmConnector) HasAllModels(modelIds []string) (bool, map[string]bool) {
+	modelList, err := l.client.ListModels(context.Background())
 	if err != nil {
 		slog.Error("llm: Model list request failed", "error", err)
-
-		return result
 	}
 
-	slog.Info("Model list retrieved", "models", models)
+	slog.Info("llm: Returned model list", "models", modelList)
+	slog.Info("llm: Checking for requested models", "requested", modelIds)
 
-	for _, model := range models.Models {
-		result = append(result, model.ID)
-	}
+	requestedModelsCount := len(modelIds)
+	searchResult := make(map[string]bool, requestedModelsCount)
+
+	for _, modelId := range modelIds {
+		searchResult[modelId] = false
+	}
 
-	return result
-}
+	for _, model := range modelList.Models {
+		if slices.Contains(modelIds, model.ID) {
+			searchResult[model.ID] = true
+		}
+	}
 
-func (l *LlmConnector) HasModel(id string) bool {
-	model, err := l.client.GetModel(context.Background(), id)
-	if err != nil {
-		slog.Error("llm: Model request failed", "error", err)
-	}
+	for _, v := range searchResult {
+		if !v {
+			return false, searchResult
+		}
+	}
 
-	slog.Debug("llm: Returned model", "model", model)
-
-	if model.ID != "" {
-		return true
-	}
-
-	return false
+	return true, searchResult
 }
```
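For reference, the membership check introduced in HasAllModels can be exercised on its own. The sketch below is not part of this PR: the hasAllModels helper and the hard-coded model IDs are hypothetical stand-ins for the connector method and the ListModels response, and it assumes Go 1.21+ for the standard-library slices package.

```go
package main

import (
	"fmt"
	"slices" // standard library since Go 1.21
)

// hasAllModels mirrors the lookup logic of LlmConnector.HasAllModels,
// but takes a plain slice of model IDs instead of the ListModels response.
func hasAllModels(available, requested []string) (bool, map[string]bool) {
	searchResult := make(map[string]bool, len(requested))

	// Mark every requested model as missing until proven otherwise.
	for _, id := range requested {
		searchResult[id] = false
	}

	// Flip the flag for every available model that was requested.
	for _, id := range available {
		if slices.Contains(requested, id) {
			searchResult[id] = true
		}
	}

	// A single unsatisfied entry fails the overall check.
	for _, found := range searchResult {
		if !found {
			return false, searchResult
		}
	}

	return true, searchResult
}

func main() {
	// Hypothetical model IDs; the prefixed form is the case the PR title refers to.
	available := []string{"openai/gpt-4o", "llama3"}

	ok, result := hasAllModels(available, []string{"openai/gpt-4o", "mistral"})
	fmt.Println(ok, result) // false map[mistral:false openai/gpt-4o:true]
}
```

One false entry in the returned map is enough to fail the check, which is what main.go (below) now logs before exiting.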
main.go (7 changes)

```diff
@@ -28,12 +28,11 @@ func main() {
 
 	slog.Info("Checking models availability")
 
-	for _, model := range []string{models.TextRequestModel, models.SummarizeModel} {
-		if !llmc.HasModel(model) {
-			slog.Error("Model not unavailable", "model", model)
-			os.Exit(1)
-		}
-	}
+	hasAll, searchResult := llmc.HasAllModels([]string{models.TextRequestModel, models.SummarizeModel})
+	if !hasAll {
+		slog.Error("Not all models are available", "result", searchResult)
+		os.Exit(1)
+	}
 
 	slog.Info("All needed models are available")
 
```