mirror of
https://github.com/mudler/LocalAI.git
synced 2025-06-30 06:30:43 +00:00
fix: make first stream message send empty content
Previously we were using omitempty, which was then parsed by clients as "". Now we return null, which certain clients concatenate blindly. This makes it closer to OpenAI's behavior, where the first message sent has empty content and just the role. Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
3c3a9b765a
commit
d8d4107492
1 changed files with 2 additions and 1 deletions
|
@ -19,9 +19,10 @@ import (
|
||||||
|
|
||||||
func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
|
func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
|
||||||
process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
|
process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
|
||||||
|
emptyMessage := ""
|
||||||
initialMessage := OpenAIResponse{
|
initialMessage := OpenAIResponse{
|
||||||
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
|
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
|
||||||
Choices: []Choice{{Delta: &Message{Role: "assistant"}}},
|
Choices: []Choice{{Delta: &Message{Role: "assistant", Content: &emptyMessage}}},
|
||||||
Object: "chat.completion.chunk",
|
Object: "chat.completion.chunk",
|
||||||
}
|
}
|
||||||
responses <- initialMessage
|
responses <- initialMessage
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue