Mirror of https://github.com/mudler/LocalAI.git (synced 2025-06-27 13:15:00 +00:00)
Enable token stream on /completions

parent 18a701355c
commit a5e9da6bf0
1 changed file with 31 additions and 0 deletions
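The change extends completionEndpoint: when the request body sets "stream": true, each predicted choice is pushed through a channel and written to the client as a server-sent event, instead of the whole completion being buffered into a single JSON response.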
@@ -192,6 +192,37 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
 
 	jsonResult, _ := json.Marshal(resp)
 	log.Debug().Msgf("Response: %s", jsonResult)
 
+	if input.Stream {
+		responses := make(chan OpenAIResponse)
+
+		go func() {
+			defer close(responses)
+			for _, r := range result {
+				responses <- OpenAIResponse{
+					Model:   input.Model,
+					Choices: []Choice{r},
+					Object:  "text_completion",
+				}
+			}
+		}()
+
+		c.Context().SetContentType("text/event-stream")
+		c.Set("Cache-Control", "no-cache")
+		c.Set("Connection", "keep-alive")
+		c.Set("Transfer-Encoding", "chunked")
+
+		c.Context().SetBodyStreamWriter(fasthttp.StreamWriter(func(w *bufio.Writer) {
+			for ev := range responses {
+				var buf bytes.Buffer
+				enc := json.NewEncoder(&buf)
+				enc.Encode(ev)
+
+				log.Debug().Msgf("Sending chunk: %s", buf.String())
+				fmt.Fprintf(w, "data: %v\n", buf.String())
+				w.Flush()
+			}
+		}))
+	}
+
 	// Return the prediction in the response body
 	return c.JSON(resp)
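The pattern is worth seeing in isolation: a producer goroutine feeds a channel and closes it when done, while fasthttp.StreamWriter drains the channel into the response, flushing after every event so tokens reach the client as they are produced. Below is a minimal, self-contained sketch of that pattern assuming only the fasthttp dependency; the Event type, streamHandler, and port are illustrative stand-ins, not names from the LocalAI codebase.

package main

import (
	"bufio"
	"bytes"
	"encoding/json"
	"fmt"
	"log"

	"github.com/valyala/fasthttp"
)

// Event stands in for OpenAIResponse in the commit above.
type Event struct {
	Text string `json:"text"`
}

func streamHandler(ctx *fasthttp.RequestCtx) {
	events := make(chan Event)

	// Producer goroutine: emits events, then closes the channel so the
	// writer's range loop below terminates and the response ends.
	go func() {
		defer close(events)
		for _, t := range []string{"Hello", ",", " world"} {
			events <- Event{Text: t}
		}
	}()

	ctx.SetContentType("text/event-stream")
	ctx.Response.Header.Set("Cache-Control", "no-cache")

	// Consumer: drain the channel into the response body, flushing after
	// each event so nothing sits in the buffer waiting for the stream to end.
	ctx.SetBodyStreamWriter(fasthttp.StreamWriter(func(w *bufio.Writer) {
		for ev := range events {
			var buf bytes.Buffer
			json.NewEncoder(&buf).Encode(ev) // Encode appends a '\n'
			fmt.Fprintf(w, "data: %v\n", buf.String())
			w.Flush()
		}
	}))
}

func main() {
	log.Fatal(fasthttp.ListenAndServe(":8081", streamHandler))
}

One subtlety the commit relies on: json.Encoder.Encode appends a trailing newline to the buffer, so the "data: %v\n" format string ends up emitting the blank line that terminates each server-sent event.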
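On the receiving end, a client just reads the response line by line and strips the data: prefix. A minimal sketch, assuming a LocalAI instance on localhost:8080 and a model named ggml-model (both placeholders for your own setup):

package main

import (
	"bufio"
	"bytes"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// "stream": true is what routes the request into the new streaming path.
	payload := []byte(`{"model": "ggml-model", "prompt": "Hello", "stream": true}`)

	resp, err := http.Post("http://localhost:8080/completions", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Each event arrives as a "data: {...}" line followed by a blank line;
	// print the JSON payload of every chunk as it comes in.
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "data: ") {
			fmt.Println(strings.TrimPrefix(line, "data: "))
		}
	}
}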