Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-20 10:35:01 +00:00)
feat(llama.cpp): guess model defaults from file (#2522)
* wip: guess information from gguf file
* update go mod
* Small fixups
* Identify llama3
* Do not try to guess the name, as reading gguf files can be expensive
* Allow to disable guessing

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Parent: 23b3d22525
Commit: aae7ad9d73
10 changed files with 139 additions and 13 deletions
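The feature works by inspecting a GGUF model file's metadata to pick sensible defaults, for example recognizing a llama3-family model, which is also why the commit notes that reading gguf files can be expensive and adds a way to disable the guessing. As a rough, illustrative sketch of what peeking into a GGUF file involves, based on the public GGUF header layout rather than LocalAI's actual implementation:

```go
package main

import (
	"encoding/binary"
	"errors"
	"fmt"
	"os"
)

// ggufMagic is the 4-byte "GGUF" marker interpreted as a little-endian uint32.
const ggufMagic = 0x46554747

// ggufHeader mirrors the fixed-size prefix of a GGUF file per the public spec:
// magic, format version, tensor count, metadata KV count (GGUF v2+ uses
// 64-bit counts; this sketch assumes v2 or later).
type ggufHeader struct {
	Magic         uint32
	Version       uint32
	TensorCount   uint64
	MetadataKVNum uint64
}

// readGGUFHeader opens the model file and reads only the fixed header, which
// is enough to confirm the format before any heavier metadata parsing.
func readGGUFHeader(path string) (ggufHeader, error) {
	var h ggufHeader
	f, err := os.Open(path)
	if err != nil {
		return h, err
	}
	defer f.Close()
	if err := binary.Read(f, binary.LittleEndian, &h); err != nil {
		return h, err
	}
	if h.Magic != ggufMagic {
		return h, errors.New("not a GGUF file")
	}
	return h, nil
}

func main() {
	if len(os.Args) < 2 {
		fmt.Fprintln(os.Stderr, "usage: ggufpeek <model.gguf>")
		os.Exit(1)
	}
	h, err := readGGUFHeader(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Printf("GGUF v%d: %d tensors, %d metadata entries\n", h.Version, h.TensorCount, h.MetadataKVNum)
}
```

The excerpt below from the changeset shows the supporting plumbing: the backend config loader now carries the model path, and a matching ModelPath load option is added.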
@@ -19,17 +19,20 @@ import (
 )
 
 type BackendConfigLoader struct {
-	configs map[string]BackendConfig
+	configs   map[string]BackendConfig
+	modelPath string
 	sync.Mutex
 }
 
-func NewBackendConfigLoader() *BackendConfigLoader {
+func NewBackendConfigLoader(modelPath string) *BackendConfigLoader {
 	return &BackendConfigLoader{
-		configs: make(map[string]BackendConfig),
+		configs:   make(map[string]BackendConfig),
+		modelPath: modelPath,
 	}
 }
 
 type LoadOptions struct {
 	modelPath string
 	debug bool
 	threads, ctxSize int
 	f16 bool
@@ -53,6 +56,12 @@ func LoadOptionContextSize(ctxSize int) ConfigLoaderOption {
 	}
 }
 
+func ModelPath(modelPath string) ConfigLoaderOption {
+	return func(o *LoadOptions) {
+		o.modelPath = modelPath
+	}
+}
+
 func LoadOptionF16(f16 bool) ConfigLoaderOption {
 	return func(o *LoadOptions) {
 		o.f16 = f16
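The new ModelPath helper follows the functional-options pattern already used in this file: each exported option returns a closure that mutates a LoadOptions. A minimal, self-contained sketch of how such options compose; the applyLoadOptions helper and the concrete values are assumptions for illustration, not LocalAI's actual call sites:

```go
package main

import "fmt"

// Mirrors the LoadOptions fields visible in the diff above.
type LoadOptions struct {
	modelPath        string
	debug            bool
	threads, ctxSize int
	f16              bool
}

// ConfigLoaderOption is a functional option over LoadOptions.
type ConfigLoaderOption func(*LoadOptions)

// ModelPath records the model directory, as added in this commit.
func ModelPath(modelPath string) ConfigLoaderOption {
	return func(o *LoadOptions) { o.modelPath = modelPath }
}

// LoadOptionF16 toggles f16, matching the existing option shown above.
func LoadOptionF16(f16 bool) ConfigLoaderOption {
	return func(o *LoadOptions) { o.f16 = f16 }
}

// applyLoadOptions is a hypothetical helper that shows how a loader would
// fold a variadic list of options into a single LoadOptions value.
func applyLoadOptions(opts ...ConfigLoaderOption) *LoadOptions {
	lo := &LoadOptions{}
	for _, opt := range opts {
		opt(lo)
	}
	return lo
}

func main() {
	lo := applyLoadOptions(ModelPath("/models"), LoadOptionF16(true))
	fmt.Printf("modelPath=%q f16=%v\n", lo.modelPath, lo.f16)
}
```

The pattern keeps the new modelPath knob additive: callers that do not pass ModelPath keep their previous behavior, which is what allows the file-based guessing to stay optional.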