fix: make first stream message to send empty content

Previously we were using omitempty, so the content field was omitted and
parsed by clients as null, which certain clients concatenate blindly.
Now we return an explicit "" instead.

This brings the behavior closer to OpenAI's, where the first streamed
message carries the role along with an explicitly empty content field.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
Ettore Di Giacinto 2023-07-15 22:48:24 +02:00
parent 3c3a9b765a
commit d8d4107492

View file

@ -19,9 +19,10 @@ import (
func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error {
process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) {
emptyMessage := ""
initialMessage := OpenAIResponse{
Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
Choices: []Choice{{Delta: &Message{Role: "assistant"}}},
Choices: []Choice{{Delta: &Message{Role: "assistant", Content: &emptyMessage}}},
Object: "chat.completion.chunk",
}
responses <- initialMessage