feat(model-list): be consistent, skip known files from listing (#2760)

fix(model-list): be consistent, skip known files from listing

This changeset does the following:

- Removes the dependency on the OpenAI schema when listing models.
- Reduces the confusion between ListModels() in the model loader and in
  the service: there is now a single ListModels, which lives in services
  and no longer depends on the OpenAI schema (see the sketch below).
- Moves the OpenAI-schema functions next to the OpenAI-specific
  endpoints that need the schema.
- Drops the ListModelsService structure, as there was no real need for
  it.
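
For orientation, here is a minimal, self-contained sketch of the pattern this consolidation aims at: a single package-level listing helper in services that returns plain model names, merging explicitly configured models with loose files found on disk while skipping known non-model files. The parameter list, helper names, and skip list below are illustrative assumptions, not the PR's actual signature; the real function takes the config loader and model loader directly and is called as services.ListModels(cl, ml, "", true) in the diff below.

package services

import (
	"os"
	"strings"
)

// Hypothetical set of non-model files that commonly sit in a models
// directory; the concrete skip list used by the project may differ.
var knownNonModelFiles = map[string]bool{
	"README.md":  true,
	"LICENSE":    true,
	".gitignore": true,
}

// ListModels sketches a single, OpenAI-schema-free listing helper: it
// returns plain model names, merging explicitly configured models with
// loose files discovered in modelsPath and skipping known non-model files.
// configured and modelsPath stand in for the config loader and model
// loader that the real function receives.
func ListModels(configured []string, modelsPath string, filter string) ([]string, error) {
	seen := map[string]bool{}
	names := []string{}

	add := func(name string) {
		if name == "" || seen[name] {
			return
		}
		if filter != "" && !strings.Contains(name, filter) {
			return
		}
		seen[name] = true
		names = append(names, name)
	}

	// Models with an explicit backend configuration come first.
	for _, name := range configured {
		add(name)
	}

	// Then loose model files found on disk.
	entries, err := os.ReadDir(modelsPath)
	if err != nil {
		return nil, err
	}
	for _, e := range entries {
		if e.IsDir() || knownNonModelFiles[e.Name()] {
			continue
		}
		add(e.Name())
	}
	return names, nil
}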

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Ettore Di Giacinto 2024-07-10 15:28:39 +02:00 committed by GitHub
parent 28c6daf916
commit 59ef426fbf
22 changed files with 97 additions and 70 deletions

@@ -27,7 +27,6 @@ func RegisterUIRoutes(app *fiber.App,
 	appConfig *config.ApplicationConfig,
 	galleryService *services.GalleryService,
 	auth func(*fiber.Ctx) error) {
-	tmpLMS := services.NewListModelsService(ml, cl, appConfig) // TODO: once createApplication() is fully in use, reference the central instance.
 	// keeps the state of models that are being installed from the UI
 	var processingModels = xsync.NewSyncedMap[string, string]()
@@ -270,7 +269,7 @@ func RegisterUIRoutes(app *fiber.App,
 	// Show the Chat page
 	app.Get("/chat/:model", auth, func(c *fiber.Ctx) error {
-		backendConfigs, _ := tmpLMS.ListModels("", true)
+		backendConfigs, _ := services.ListModels(cl, ml, "", true)
 		summary := fiber.Map{
 			"Title":        "LocalAI - Chat with " + c.Params("model"),
@@ -285,7 +284,7 @@ func RegisterUIRoutes(app *fiber.App,
 	})
 	app.Get("/talk/", auth, func(c *fiber.Ctx) error {
-		backendConfigs, _ := tmpLMS.ListModels("", true)
+		backendConfigs, _ := services.ListModels(cl, ml, "", true)
 		if len(backendConfigs) == 0 {
 			// If no model is available redirect to the index which suggests how to install models
@@ -295,7 +294,7 @@ func RegisterUIRoutes(app *fiber.App,
 		summary := fiber.Map{
 			"Title":        "LocalAI - Talk",
 			"ModelsConfig": backendConfigs,
-			"Model":        backendConfigs[0].ID,
+			"Model":        backendConfigs[0],
 			"IsP2PEnabled": p2p.IsP2PEnabled(),
 			"Version":      internal.PrintableVersion(),
 		}
@@ -306,7 +305,7 @@ func RegisterUIRoutes(app *fiber.App,
 	app.Get("/chat/", auth, func(c *fiber.Ctx) error {
-		backendConfigs, _ := tmpLMS.ListModels("", true)
+		backendConfigs, _ := services.ListModels(cl, ml, "", true)
 		if len(backendConfigs) == 0 {
 			// If no model is available redirect to the index which suggests how to install models
@@ -314,9 +313,9 @@ func RegisterUIRoutes(app *fiber.App,
 		}
 		summary := fiber.Map{
-			"Title":        "LocalAI - Chat with " + backendConfigs[0].ID,
+			"Title":        "LocalAI - Chat with " + backendConfigs[0],
 			"ModelsConfig": backendConfigs,
-			"Model":        backendConfigs[0].ID,
+			"Model":        backendConfigs[0],
 			"Version":      internal.PrintableVersion(),
 			"IsP2PEnabled": p2p.IsP2PEnabled(),
 		}
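
Note on the last two hunks: the templates now receive backendConfigs[0] directly instead of backendConfigs[0].ID, and the value is concatenated with a string, which indicates that the consolidated services.ListModels returns plain model names (strings) rather than OpenAI-schema objects or backend config structs.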