From d8d410749222fe1169d68698f60eb0cb92782d49 Mon Sep 17 00:00:00 2001 From: Ettore Di Giacinto Date: Sat, 15 Jul 2023 22:48:24 +0200 Subject: [PATCH] fix: make first stream message send empty content Previously we were using omitempty which was then parsed by clients as "". Now we return null, which certain clients concatenate blindly. This makes it close to OpenAI, where the first message sent has empty content, but just the role. Signed-off-by: Ettore Di Giacinto --- api/openai/chat.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/api/openai/chat.go b/api/openai/chat.go index 30f6e01a..345e5c9a 100644 --- a/api/openai/chat.go +++ b/api/openai/chat.go @@ -19,9 +19,10 @@ import ( func ChatEndpoint(cm *config.ConfigLoader, o *options.Option) func(c *fiber.Ctx) error { process := func(s string, req *OpenAIRequest, config *config.Config, loader *model.ModelLoader, responses chan OpenAIResponse) { + emptyMessage := "" initialMessage := OpenAIResponse{ Model: req.Model, // we have to return what the user sent here, due to OpenAI spec. - Choices: []Choice{{Delta: &Message{Role: "assistant", Content: &emptyMessage}}}, Object: "chat.completion.chunk", } responses <- initialMessage