Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-20 02:24:59 +00:00)
feat: extend model configuration for llama.cpp (#536)
Parent: 694dd4ad9e
Commit: 5abbb134d9
6 changed files with 69 additions and 150 deletions
@@ -4,8 +4,8 @@ import (
 	"bufio"
 	"bytes"
 	"encoding/base64"
-	"errors"
 	"encoding/json"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -125,6 +125,9 @@ type OpenAIRequest struct {
 	MirostatTAU float64 `json:"mirostat_tau" yaml:"mirostat_tau"`
 	Mirostat    int     `json:"mirostat" yaml:"mirostat"`
 
+	FrequencyPenalty float64 `json:"frequency_penalty" yaml:"frequency_penalty"`
+	TFZ              float64 `json:"tfz" yaml:"tfz"`
+
 	Seed int `json:"seed" yaml:"seed"`
 
 	// Image (not supported by OpenAI)
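For illustration only, here is a minimal Go sketch (not part of this commit) of a client payload that exercises the extended sampling fields above via their JSON tags. The endpoint path, model name, and parameter values are assumptions, not taken from the diff.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical request body using the JSON tags from the extended
	// OpenAIRequest struct; the model name and values are made up.
	body := map[string]interface{}{
		"model":             "ggml-model.bin", // assumed model name
		"prompt":            "Once upon a time",
		"mirostat":          2,
		"mirostat_tau":      5.0,
		"frequency_penalty": 0.2,
		"tfz":               0.95,
		"seed":              42,
	}
	payload, err := json.Marshal(body)
	if err != nil {
		panic(err)
	}
	// Assumed local endpoint for an OpenAI-compatible completions route.
	resp, err := http.Post("http://localhost:8080/v1/completions",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}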
@@ -191,7 +194,7 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 		}
 
 		if input.Stream {
-			if (len(config.PromptStrings) > 1) {
+			if len(config.PromptStrings) > 1 {
 				return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
 			}
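With this guard, a streaming completion request is expected to carry exactly one prompt: a payload that combined, say, "prompt": ["a", "b"] with "stream": true would be rejected with the error shown above (the example payload is an illustrative assumption, not taken from the commit).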