fix: set default rope if not specified

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Ettore Di Giacinto 2023-07-29 00:04:25 +02:00
parent fa4de05c14
commit f085baa77d
2 changed files with 20 additions and 12 deletions

@@ -58,6 +58,15 @@ func (llm *LLM) Load(opts *pb.ModelOptions) error {
 }
 
 func buildPredictOptions(opts *pb.PredictOptions) []llama.PredictOption {
+	ropeFreqBase := float32(10000)
+	ropeFreqScale := float32(1)
+	if opts.RopeFreqBase != 0 {
+		ropeFreqBase = opts.RopeFreqBase
+	}
+	if opts.RopeFreqScale != 0 {
+		ropeFreqScale = opts.RopeFreqScale
+	}
 	predictOptions := []llama.PredictOption{
 		llama.SetTemperature(opts.Temperature),
 		llama.SetTopP(opts.TopP),
@@ -65,8 +74,8 @@ func buildPredictOptions(opts *pb.PredictOptions) []llama.PredictOption {
 		llama.SetTokens(int(opts.Tokens)),
 		llama.SetThreads(int(opts.Threads)),
 		llama.WithGrammar(opts.Grammar),
-		llama.SetRopeFreqBase(opts.RopeFreqBase),
-		llama.SetRopeFreqScale(opts.RopeFreqScale),
+		llama.SetRopeFreqBase(ropeFreqBase),
+		llama.SetRopeFreqScale(ropeFreqScale),
 		llama.SetNegativePromptScale(opts.NegativePromptScale),
 		llama.SetNegativePrompt(opts.NegativePrompt),
 	}
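
For context: the change makes the backend fall back to the usual llama.cpp RoPE values (frequency base 10000, scale 1) whenever the incoming pb.PredictOptions leaves those fields at their proto3 zero value, instead of forwarding the zeros to the model. Below is a minimal, self-contained sketch of that fallback; predictOptions and ropeDefaults are illustrative stand-ins for the generated pb.PredictOptions message and the logic inside buildPredictOptions, not code from the repository.

package main

import "fmt"

// predictOptions is an illustrative stand-in for the generated pb.PredictOptions
// message: unset proto3 numeric fields arrive as their zero value.
type predictOptions struct {
	RopeFreqBase  float32
	RopeFreqScale float32
}

// ropeDefaults mirrors the fallback in this diff: if the RoPE fields are zero,
// substitute the assumed llama.cpp defaults (base 10000, scale 1) rather than
// passing the zeros through to the model.
func ropeDefaults(opts predictOptions) (base, scale float32) {
	base, scale = 10000, 1
	if opts.RopeFreqBase != 0 {
		base = opts.RopeFreqBase
	}
	if opts.RopeFreqScale != 0 {
		scale = opts.RopeFreqScale
	}
	return base, scale
}

func main() {
	// Unset fields fall back to the defaults.
	fmt.Println(ropeDefaults(predictOptions{})) // 10000 1
	// Explicit values are passed through unchanged.
	fmt.Println(ropeDefaults(predictOptions{RopeFreqBase: 1000000, RopeFreqScale: 0.25})) // 1e+06 0.25
}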