mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 10:35:01 +00:00
feat: support slices or strings in the prompt completion endpoint (#162)
Signed-off-by: mudler <mudler@mocaccino.org>
This commit is contained in:
parent
0a4899f366
commit
67992a7d99
1 changed files with 38 additions and 18 deletions
|
@@ -57,7 +57,7 @@ type OpenAIRequest struct {
|
||||||
Model string `json:"model" yaml:"model"`
|
Model string `json:"model" yaml:"model"`
|
||||||
|
|
||||||
// Prompt is read only by completion API calls
|
// Prompt is read only by completion API calls
|
||||||
Prompt string `json:"prompt" yaml:"prompt"`
|
Prompt interface{} `json:"prompt" yaml:"prompt"`
|
||||||
|
|
||||||
// Edit endpoint
|
// Edit endpoint
|
||||||
Instruction string `json:"instruction" yaml:"instruction"`
|
Instruction string `json:"instruction" yaml:"instruction"`
|
||||||
|
@@ -122,9 +122,12 @@ func updateConfig(config *Config, input *OpenAIRequest) {
|
||||||
if stop != "" {
|
if stop != "" {
|
||||||
config.StopWords = append(config.StopWords, stop)
|
config.StopWords = append(config.StopWords, stop)
|
||||||
}
|
}
|
||||||
case []string:
|
case []interface{}:
|
||||||
config.StopWords = append(config.StopWords, stop...)
|
for _, pp := range stop {
|
||||||
|
if s, ok := pp.(string); ok {
|
||||||
|
config.StopWords = append(config.StopWords, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.RepeatPenalty != 0 {
|
if input.RepeatPenalty != 0 {
|
||||||
|
@@ -234,27 +237,44 @@ func completionEndpoint(cm ConfigMerger, debug bool, loader *model.ModelLoader,
|
||||||
|
|
||||||
log.Debug().Msgf("Parameter Config: %+v", config)
|
log.Debug().Msgf("Parameter Config: %+v", config)
|
||||||
|
|
||||||
predInput := input.Prompt
|
predInput := []string{}
|
||||||
|
|
||||||
|
switch p := input.Prompt.(type) {
|
||||||
|
case string:
|
||||||
|
predInput = append(predInput, p)
|
||||||
|
case []interface{}:
|
||||||
|
for _, pp := range p {
|
||||||
|
if s, ok := pp.(string); ok {
|
||||||
|
predInput = append(predInput, s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
templateFile := config.Model
|
templateFile := config.Model
|
||||||
|
|
||||||
if config.TemplateConfig.Completion != "" {
|
if config.TemplateConfig.Completion != "" {
|
||||||
templateFile = config.TemplateConfig.Completion
|
templateFile = config.TemplateConfig.Completion
|
||||||
}
|
}
|
||||||
|
|
||||||
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
var result []Choice
|
||||||
templatedInput, err := loader.TemplatePrefix(templateFile, struct {
|
for _, i := range predInput {
|
||||||
Input string
|
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
||||||
}{Input: predInput})
|
templatedInput, err := loader.TemplatePrefix(templateFile, struct {
|
||||||
if err == nil {
|
Input string
|
||||||
predInput = templatedInput
|
}{Input: i})
|
||||||
log.Debug().Msgf("Template found, input modified to: %s", predInput)
|
if err == nil {
|
||||||
}
|
i = templatedInput
|
||||||
|
log.Debug().Msgf("Template found, input modified to: %s", i)
|
||||||
|
}
|
||||||
|
|
||||||
result, err := ComputeChoices(predInput, input, config, loader, func(s string, c *[]Choice) {
|
r, err := ComputeChoices(i, input, config, loader, func(s string, c *[]Choice) {
|
||||||
*c = append(*c, Choice{Text: s})
|
*c = append(*c, Choice{Text: s})
|
||||||
}, nil)
|
}, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
result = append(result, r...)
|
||||||
}
|
}
|
||||||
|
|
||||||
resp := &OpenAIResponse{
|
resp := &OpenAIResponse{
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue