feat(llama.cpp): estimate vram usage

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Ettore Di Giacinto 2025-05-02 10:23:30 +02:00
parent bace6516f1
commit 7f654fece7
7 changed files with 131 additions and 21 deletions


@@ -16,6 +16,22 @@ func GPUs() ([]*gpu.GraphicsCard, error) {
	return gpu.GraphicsCards, nil
}

func TotalAvailableVRAM() (uint64, error) {
	gpus, err := GPUs()
	if err != nil {
		return 0, err
	}

	var totalVRAM uint64
	for _, gpu := range gpus {
		if gpu.Node.Memory.TotalUsableBytes > 0 {
			totalVRAM += uint64(gpu.Node.Memory.TotalUsableBytes)
		}
	}

	return totalVRAM, nil
}
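
// The sketch below is not part of this commit: it is a hypothetical caller
// illustrating how TotalAvailableVRAM might be used to check whether an
// estimated model footprint (in bytes) is likely to fit in the usable VRAM.
// fitsInVRAM and estimatedModelBytes are assumed names, not from the diff.
func fitsInVRAM(estimatedModelBytes uint64) (bool, error) {
	totalVRAM, err := TotalAvailableVRAM()
	if err != nil {
		return false, err
	}
	return estimatedModelBytes <= totalVRAM, nil
}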

func HasGPU(vendor string) bool {
	gpus, err := GPUs()
	if err != nil {