mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 10:35:01 +00:00

* start breaking up the giant channel refactor now that it's better understood - easier to merge bites Signed-off-by: Dave Lee <dave@gray101.com> * add concurrency and base64 back in, along with new base64 tests. Signed-off-by: Dave Lee <dave@gray101.com> * Automatic rename of whisper.go's Result to TranscriptResult Signed-off-by: Dave Lee <dave@gray101.com> * remove pkg/concurrency - significant changes coming in split 2 Signed-off-by: Dave Lee <dave@gray101.com> * fix comments Signed-off-by: Dave Lee <dave@gray101.com> * add list_model service as another low-risk service to get it out of the way Signed-off-by: Dave Lee <dave@gray101.com> * split backend config loader into separate file from the actual config struct. No changes yet, just reduce cognitive load with smaller files of logical blocks Signed-off-by: Dave Lee <dave@gray101.com> * rename state.go ==> application.go Signed-off-by: Dave Lee <dave@gray101.com> * fix lost import? Signed-off-by: Dave Lee <dave@gray101.com> --------- Signed-off-by: Dave Lee <dave@gray101.com>
72 lines
1.8 KiB
Go
72 lines
1.8 KiB
Go
package services
|
|
|
|
import (
|
|
"regexp"
|
|
|
|
"github.com/go-skynet/LocalAI/core/config"
|
|
"github.com/go-skynet/LocalAI/core/schema"
|
|
"github.com/go-skynet/LocalAI/pkg/model"
|
|
)
|
|
|
|
// ListModelsService lists the models known to the system, combining the
// backend configurations held by the config loader with the loose model
// files discovered by the model loader.
type ListModelsService struct {
	bcl       *config.BackendConfigLoader // source of configured backends (GetAllBackendConfigs)
	ml        *model.ModelLoader          // source of loose model files on disk (ListModels)
	appConfig *config.ApplicationConfig   // application-wide settings; not read in this file — presumably kept for future use, TODO confirm
}
|
|
|
|
func NewListModelsService(ml *model.ModelLoader, bcl *config.BackendConfigLoader, appConfig *config.ApplicationConfig) *ListModelsService {
|
|
return &ListModelsService{
|
|
bcl: bcl,
|
|
ml: ml,
|
|
appConfig: appConfig,
|
|
}
|
|
}
|
|
|
|
func (lms *ListModelsService) ListModels(filter string, excludeConfigured bool) ([]schema.OpenAIModel, error) {
|
|
|
|
models, err := lms.ml.ListModels()
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
|
|
var mm map[string]interface{} = map[string]interface{}{}
|
|
|
|
dataModels := []schema.OpenAIModel{}
|
|
|
|
var filterFn func(name string) bool
|
|
|
|
// If filter is not specified, do not filter the list by model name
|
|
if filter == "" {
|
|
filterFn = func(_ string) bool { return true }
|
|
} else {
|
|
// If filter _IS_ specified, we compile it to a regex which is used to create the filterFn
|
|
rxp, err := regexp.Compile(filter)
|
|
if err != nil {
|
|
return nil, err
|
|
}
|
|
filterFn = func(name string) bool {
|
|
return rxp.MatchString(name)
|
|
}
|
|
}
|
|
|
|
// Start with the known configurations
|
|
for _, c := range lms.bcl.GetAllBackendConfigs() {
|
|
if excludeConfigured {
|
|
mm[c.Model] = nil
|
|
}
|
|
|
|
if filterFn(c.Name) {
|
|
dataModels = append(dataModels, schema.OpenAIModel{ID: c.Name, Object: "model"})
|
|
}
|
|
}
|
|
|
|
// Then iterate through the loose files:
|
|
for _, m := range models {
|
|
// And only adds them if they shouldn't be skipped.
|
|
if _, exists := mm[m]; !exists && filterFn(m) {
|
|
dataModels = append(dataModels, schema.OpenAIModel{ID: m, Object: "model"})
|
|
}
|
|
}
|
|
|
|
return dataModels, nil
|
|
}
|