feat: add rope settings and negative prompt, drop grammar backend (#797)
Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
parent 12fe0932c4
commit 569c1d1163

15 changed files with 291 additions and 491 deletions
@@ -61,6 +61,11 @@ func buildPredictOptions(opts *pb.PredictOptions) []llama.PredictOption {
 		llama.SetTopK(int(opts.TopK)),
 		llama.SetTokens(int(opts.Tokens)),
 		llama.SetThreads(int(opts.Threads)),
+		llama.WithGrammar(opts.Grammar),
+		llama.SetRopeFreqBase(float64(opts.RopeFreqBase)),
+		llama.SetRopeFreqScale(float64(opts.RopeFreqScale)),
+		llama.SetNegativePromptScale(float64(opts.NegativePromptScale)),
+		llama.SetNegativePrompt(opts.NegativePrompt),
 	}

 	if opts.PromptCacheAll {
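
For reference, here is a minimal sketch (not part of the commit) of how a request could exercise the new options through buildPredictOptions. The proto import path, the go-llama.cpp import, the surrounding package name, and the literal values are assumptions made for illustration; only the field and setter names are taken from the hunk above.

// Sketch only: assumed to live in the same package as buildPredictOptions.
package llama

import (
	pb "github.com/go-skynet/LocalAI/pkg/grpc/proto" // assumed import path for the generated proto
	llama "github.com/go-skynet/go-llama.cpp"        // assumed go-llama binding used by the backend
)

// exampleBuildOptions is a hypothetical helper showing how the new fields
// flow from a PredictOptions request into go-llama predict options.
func exampleBuildOptions() []llama.PredictOption {
	opts := &pb.PredictOptions{
		TopK:    40,
		Tokens:  256,
		Threads: 4,
		// New in this commit: RoPE frequency settings for context scaling.
		RopeFreqBase:  10000,
		RopeFreqScale: 0.5,
		// New in this commit: negative prompt plus its guidance scale.
		NegativePrompt:      "gibberish, repetition",
		NegativePromptScale: 1.3,
		// Grammar is forwarded directly to the llama backend (illustrative GBNF string).
		Grammar: `root ::= "yes" | "no"`,
	}
	return buildPredictOptions(opts)
}

Passing the grammar straight into the llama options here is consistent with the commit title: with the llama backend accepting the grammar itself, the separate grammar backend is no longer needed.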