chore: drop gpt4all.cpp (#3106)

chore: drop gpt4all

gpt4all is already supported in llama.cpp; the backend was kept only to maintain
compatibility with old gpt4all models (prior to the GGUF format).

It is now a good time to clean up and remove it to slim down the compilation
process.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Authored by Ettore Di Giacinto on 2024-08-07 23:35:55 +02:00, committed by GitHub
parent 36e185ba63
commit 8814b31805
9 changed files with 7 additions and 177 deletions


@@ -21,7 +21,7 @@ func (r *LLamaCPP) Run(ctx *cliContext.Context) error {
 	err := assets.ExtractFiles(ctx.BackendAssets, r.BackendAssetsPath)
 	log.Debug().Msgf("Extracting backend assets files to %s", r.BackendAssetsPath)
 	if err != nil {
-		log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly, like gpt4all)", err)
+		log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly)", err)
 	}
 	if len(os.Args) < 4 {


@@ -33,7 +33,7 @@ func (r *P2P) Run(ctx *cliContext.Context) error {
 	err := assets.ExtractFiles(ctx.BackendAssets, r.BackendAssetsPath)
 	log.Debug().Msgf("Extracting backend assets files to %s", r.BackendAssetsPath)
 	if err != nil {
-		log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly, like gpt4all)", err)
+		log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly)", err)
 	}
 	// Check if the token is set
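
For reference, a minimal standalone sketch of the pattern both call sites follow: extract embedded backend assets to disk and, if extraction fails, log a warning and keep going instead of aborting. The extractFiles helper and backendAssets variable below are illustrative placeholders, not LocalAI's actual assets.ExtractFiles implementation; only the zerolog calls mirror the diff above.

package main

import (
	"embed"
	"os"

	"github.com/rs/zerolog/log"
)

// backendAssets stands in for the embedded asset bundle; in the real code it
// would be populated via a go:embed directive.
var backendAssets embed.FS

// extractFiles is an illustrative placeholder for assets.ExtractFiles: a real
// implementation would walk the embedded FS and write each file under dst.
func extractFiles(fs embed.FS, dst string) error {
	return os.MkdirAll(dst, 0o750)
}

func main() {
	dst := "/tmp/backend-assets"
	log.Debug().Msgf("Extracting backend assets files to %s", dst)
	if err := extractFiles(backendAssets, dst); err != nil {
		// Extraction failure is non-fatal: warn and continue, since only
		// some backends need these assets to work properly.
		log.Warn().Msgf("Failed extracting backend assets files: %s (might be required for some backends to work properly)", err)
	}
}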