mirror of
https://github.com/mudler/LocalAI.git
synced 2025-05-20 10:35:01 +00:00
fix(openai): consistently return stop reason (#4771)
We were not returning a stop reason when no tool was actually called (even if one was specified). Fixes: https://github.com/mudler/LocalAI/issues/4716 Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
This commit is contained in:
parent
e4b8ddb6a1
commit
8d45670e41
1 changed file with 10 additions and 6 deletions
|
@ -401,6 +401,11 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, evaluat
|
||||||
log.Debug().Msgf("Text content to return: %s", textContentToReturn)
|
log.Debug().Msgf("Text content to return: %s", textContentToReturn)
|
||||||
noActionsToRun := len(results) > 0 && results[0].Name == noActionName || len(results) == 0
|
noActionsToRun := len(results) > 0 && results[0].Name == noActionName || len(results) == 0
|
||||||
|
|
||||||
|
finishReason := "stop"
|
||||||
|
if len(input.Tools) > 0 {
|
||||||
|
finishReason = "tool_calls"
|
||||||
|
}
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case noActionsToRun:
|
case noActionsToRun:
|
||||||
result, err := handleQuestion(config, input, ml, startupOptions, results, s, predInput)
|
result, err := handleQuestion(config, input, ml, startupOptions, results, s, predInput)
|
||||||
|
@ -408,19 +413,18 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, evaluat
|
||||||
log.Error().Err(err).Msg("error handling question")
|
log.Error().Err(err).Msg("error handling question")
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
*c = append(*c, schema.Choice{
|
*c = append(*c, schema.Choice{
|
||||||
|
FinishReason: finishReason,
|
||||||
Message: &schema.Message{Role: "assistant", Content: &result}})
|
Message: &schema.Message{Role: "assistant", Content: &result}})
|
||||||
default:
|
default:
|
||||||
toolChoice := schema.Choice{
|
toolChoice := schema.Choice{
|
||||||
|
FinishReason: finishReason,
|
||||||
Message: &schema.Message{
|
Message: &schema.Message{
|
||||||
Role: "assistant",
|
Role: "assistant",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(input.Tools) > 0 {
|
|
||||||
toolChoice.FinishReason = "tool_calls"
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, ss := range results {
|
for _, ss := range results {
|
||||||
name, args := ss.Name, ss.Arguments
|
name, args := ss.Name, ss.Arguments
|
||||||
if len(input.Tools) > 0 {
|
if len(input.Tools) > 0 {
|
||||||
|
@ -438,7 +442,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, evaluat
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
// otherwise we return more choices directly
|
// otherwise we return more choices directly (deprecated)
|
||||||
*c = append(*c, schema.Choice{
|
*c = append(*c, schema.Choice{
|
||||||
FinishReason: "function_call",
|
FinishReason: "function_call",
|
||||||
Message: &schema.Message{
|
Message: &schema.Message{
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue