Mirror of https://github.com/mudler/LocalAI.git, synced 2025-05-20 10:35:01 +00:00
chore: drop gpt4all.cpp (#3106)

chore: drop gpt4all

gpt4all is already supported in llama.cpp; the backend was kept only for compatibility with old gpt4all models (prior to the GGUF format). Now is a good time to clean up and remove it to slim down the compilation process.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent 36e185ba63, commit 8814b31805

9 changed files with 7 additions and 177 deletions
@@ -267,7 +267,7 @@ func RegisterUIRoutes(app *fiber.App,
 				return c.SendString(elements.ProgressBar("100"))
 			}
 			if status.Error != nil {
-				// TODO: instead of deleting the job, we should keep it in the cache and make it dismissable
+				// TODO: instead of deleting the job, we should keep it in the cache and make it dismissable by the user
 				processingModels.DeleteUUID(jobUID)
 				return c.SendString(elements.ErrorProgress(status.Error.Error(), status.GalleryModelName))
 			}
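The TODO in this hunk points at an alternative to deleting the failed job outright: keep it in the cache and let the user dismiss it. Below is a minimal, self-contained Go sketch of that idea. The `jobStatus` type, `jobCache`, and `Dismiss` helper are invented here for illustration only and are not LocalAI's actual types or API.

package main

import (
	"fmt"
	"sync"
)

// jobStatus is a hypothetical, trimmed-down stand-in for the gallery job
// state tracked by the UI routes; it is not LocalAI's actual type.
type jobStatus struct {
	GalleryModelName string
	Err              error
	Dismissed        bool
}

// jobCache sketches the idea from the TODO: instead of deleting a failed
// job, keep it around and let the user dismiss it later.
type jobCache struct {
	mu   sync.Mutex
	jobs map[string]*jobStatus
}

func newJobCache() *jobCache {
	return &jobCache{jobs: map[string]*jobStatus{}}
}

// Set stores or replaces the status for a job UUID.
func (c *jobCache) Set(uuid string, s *jobStatus) {
	c.mu.Lock()
	defer c.mu.Unlock()
	c.jobs[uuid] = s
}

// Dismiss marks a job as acknowledged by the user instead of removing it,
// so its error stays inspectable until explicitly cleared.
func (c *jobCache) Dismiss(uuid string) {
	c.mu.Lock()
	defer c.mu.Unlock()
	if s, ok := c.jobs[uuid]; ok {
		s.Dismissed = true
	}
}

func main() {
	cache := newJobCache()
	cache.Set("job-123", &jobStatus{
		GalleryModelName: "example-model",
		Err:              fmt.Errorf("download failed"),
	})

	// A failed job stays in the cache; the user dismisses it from the UI.
	cache.Dismiss("job-123")
	fmt.Println("dismissed:", cache.jobs["job-123"].Dismissed)
}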