mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-24 04:25:00 +00:00
feat(llama.cpp/clip): inject gpu options if we detect GPUs
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
701cd6b6d5
commit
49c7ade8a0
3 changed files with 41 additions and 44 deletions
|
@@ -4,6 +4,7 @@ import (
|
|||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/mudler/LocalAI/pkg/xsysinfo"
|
||||
"github.com/rs/zerolog/log"
|
||||
gguf "github.com/thxcode/gguf-parser-go"
|
||||
)
|
||||
|
@@ -35,4 +36,10 @@ func guessDefaultsFromFile(cfg *BackendConfig, modelPath string, defaultCtx int)
|
|||
}
|
||||
cfg.ContextSize = &defaultCtx
|
||||
}
|
||||
|
||||
if cfg.Options == nil {
|
||||
if xsysinfo.HasGPU("nvidia") || xsysinfo.HasGPU("amd") {
|
||||
cfg.Options = []string{"gpu"}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue