mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 10:35:01 +00:00
chore(refactor): group cpu cap detection (#4674)
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
eef80b9880
commit
f9e368b7c4
1 changed file with 17 additions and 32 deletions
|
@ -66,6 +66,17 @@ const (
|
||||||
LocalStoreBackend = "local-store"
|
LocalStoreBackend = "local-store"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// llamaCPPVariants enumerates the llama.cpp backend build names that the
// auto-detection scan looks for in the asset directory. The slice order is
// significant: the detection loop iterates it in order, so matches are
// appended to backends[LLamaCPP] in this order (AVX2 first, gRPC last).
var llamaCPPVariants = []string{
	LLamaCPPAVX2,
	LLamaCPPAVX,
	LLamaCPPFallback,
	LLamaCPPCUDA,
	LLamaCPPHipblas,
	LLamaCPPSycl16,
	LLamaCPPSycl32,
	LLamaCPPGRPC,
}
|
||||||
|
|
||||||
func backendPath(assetDir, backend string) string {
|
func backendPath(assetDir, backend string) string {
|
||||||
return filepath.Join(assetDir, "backend-assets", "grpc", backend)
|
return filepath.Join(assetDir, "backend-assets", "grpc", backend)
|
||||||
}
|
}
|
||||||
|
@ -107,40 +118,14 @@ ENTRY:
|
||||||
if AutoDetect {
|
if AutoDetect {
|
||||||
// if we find the llama.cpp variants, show them as a single backend (llama-cpp) as later we are going to pick that up
|
// if we find the llama.cpp variants, show them as a single backend (llama-cpp) as later we are going to pick that up
|
||||||
// when starting the service
|
// when starting the service
|
||||||
foundLCPPAVX, foundLCPPAVX2, foundLCPPFallback, foundLCPPGRPC, foundLCPPCuda, foundLCPPHipblas, foundSycl16, foundSycl32 := false, false, false, false, false, false, false, false
|
foundVariants := map[string]bool{}
|
||||||
if _, ok := backends[LLamaCPP]; !ok {
|
if _, ok := backends[LLamaCPP]; !ok {
|
||||||
for _, e := range entry {
|
for _, e := range entry {
|
||||||
if strings.Contains(e.Name(), LLamaCPPAVX2) && !foundLCPPAVX2 {
|
for _, v := range llamaCPPVariants {
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPAVX2)
|
if strings.Contains(e.Name(), v) && !foundVariants[v] {
|
||||||
foundLCPPAVX2 = true
|
backends[LLamaCPP] = append(backends[LLamaCPP], v)
|
||||||
}
|
foundVariants[v] = true
|
||||||
if strings.Contains(e.Name(), LLamaCPPAVX) && !foundLCPPAVX {
|
}
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPAVX)
|
|
||||||
foundLCPPAVX = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPFallback) && !foundLCPPFallback {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPFallback)
|
|
||||||
foundLCPPFallback = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPGRPC) && !foundLCPPGRPC {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPGRPC)
|
|
||||||
foundLCPPGRPC = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPCUDA) && !foundLCPPCuda {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPCUDA)
|
|
||||||
foundLCPPCuda = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPHipblas) && !foundLCPPHipblas {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPHipblas)
|
|
||||||
foundLCPPHipblas = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPSycl16) && !foundSycl16 {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPSycl16)
|
|
||||||
foundSycl16 = true
|
|
||||||
}
|
|
||||||
if strings.Contains(e.Name(), LLamaCPPSycl32) && !foundSycl32 {
|
|
||||||
backends[LLamaCPP] = append(backends[LLamaCPP], LLamaCPPSycl32)
|
|
||||||
foundSycl32 = true
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue