Mirror of https://github.com/mudler/LocalAI.git (synced 2025-06-30 06:30:43 +00:00)
feat: add PromptCacheRO option
parent 3b5df11881
commit 95bf290e37

5 changed files with 13 additions and 122 deletions
@@ -45,6 +45,7 @@ func defaultLLamaOpts(c Config) []llama.ModelOption {
 	if c.Batch != 0 {
 		llamaOpts = append(llamaOpts, llama.SetNBatch(c.Batch))
 	}

 	return llamaOpts
 }
@@ -174,6 +175,10 @@ func buildLLamaPredictOptions(c Config, modelPath string) []llama.PredictOption {
 		predictOptions = append(predictOptions, llama.EnablePromptCacheAll)
 	}

+	if c.PromptCacheRO {
+		predictOptions = append(predictOptions, llama.EnablePromptCacheRO)
+	}
+
 	if c.PromptCachePath != "" {
 		// Create parent directory
 		p := filepath.Join(modelPath, c.PromptCachePath)
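
For readers unfamiliar with the pattern above: the new Config field simply gates whether one more predict option is appended before inference. The sketch below reproduces that shape in isolation as a minimal, self-contained Go program; Config, PredictOption, and the option functions here are illustrative stand-ins, not the actual go-llama/LocalAI API.

package main

import "fmt"

// predictSettings and PredictOption mimic the functional-options style used
// in the diff; the names are illustrative, not the real bindings.
type predictSettings struct {
	promptCacheAll bool
	promptCacheRO  bool
}

type PredictOption func(*predictSettings)

// Stand-ins for llama.EnablePromptCacheAll / llama.EnablePromptCacheRO.
func EnablePromptCacheAll(s *predictSettings) { s.promptCacheAll = true }
func EnablePromptCacheRO(s *predictSettings)  { s.promptCacheRO = true }

// Config mirrors the two relevant (assumed) fields of the API config.
type Config struct {
	PromptCacheAll bool
	PromptCacheRO  bool
}

// buildPredictOptions conditionally appends options, matching the shape of
// the patched buildLLamaPredictOptions.
func buildPredictOptions(c Config) []PredictOption {
	var opts []PredictOption
	if c.PromptCacheAll {
		opts = append(opts, EnablePromptCacheAll)
	}
	if c.PromptCacheRO {
		opts = append(opts, EnablePromptCacheRO)
	}
	return opts
}

func main() {
	var s predictSettings
	for _, opt := range buildPredictOptions(Config{PromptCacheRO: true}) {
		opt(&s)
	}
	fmt.Printf("%+v\n", s) // prints {promptCacheAll:false promptCacheRO:true}
}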