mirror of
https://github.com/mudler/LocalAI.git
synced 2025-06-29 22:20:43 +00:00
fix(completionEndpoint): limit to 1 PromptStrings
only when Stream
This commit is contained in:
parent
b515ed2d37
commit
06d467a252
1 changed file with 4 additions and 4 deletions
|
@ -190,10 +190,6 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
||||||
templateFile = config.TemplateConfig.Completion
|
templateFile = config.TemplateConfig.Completion
|
||||||
}
|
}
|
||||||
|
|
||||||
if (len(config.PromptStrings) > 1) {
|
|
||||||
return errors.New("cannot handle more than 1 `PromptStrings`")
|
|
||||||
}
|
|
||||||
|
|
||||||
predInput := config.PromptStrings[0]
|
predInput := config.PromptStrings[0]
|
||||||
|
|
||||||
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
|
||||||
|
@ -206,6 +202,10 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
|
||||||
}
|
}
|
||||||
|
|
||||||
if input.Stream {
|
if input.Stream {
|
||||||
|
if (len(config.PromptStrings) > 1) {
|
||||||
|
return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
|
||||||
|
}
|
||||||
|
|
||||||
responses := make(chan OpenAIResponse)
|
responses := make(chan OpenAIResponse)
|
||||||
|
|
||||||
go process(predInput, input, config, o.loader, responses)
|
go process(predInput, input, config, o.loader, responses)
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue