Mirror of https://github.com/mudler/LocalAI.git (synced 2025-05-28 14:35:00 +00:00)
feat(functions): better free string matching, allow to expect strings after JSON (#2445)
Now allow any non-character, both as a prefix and as a suffix, when mixed grammars are enabled.

Signed-off-by: Ettore Di Giacinto <mudler@localai.io>
Parent: 5dc6bace49
Commit: 3f7212c660
4 changed files with 43 additions and 15 deletions
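For context, here is a minimal, hypothetical sketch of the behaviour this change targets: with mixed grammars the model may emit free text both before and after the JSON tool call, and that surrounding text has to be recovered separately from the JSON. The helper splitMixedOutput below is invented purely for illustration; in LocalAI the real parsing is done by functions.ParseTextContent, functions.CleanupLLMResult and functions.ParseFunctionCall, as the diff below shows.

// Hypothetical illustration only, not LocalAI code: split a "mixed" model
// output into the free text and the embedded JSON tool call.
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// splitMixedOutput returns the free text surrounding the first JSON object
// in the model output, plus the JSON object itself.
func splitMixedOutput(out string) (text, jsonCall string) {
	re := regexp.MustCompile(`(?s)\{.*\}`) // crude: first '{' to last '}'
	loc := re.FindStringIndex(out)
	if loc == nil {
		return strings.TrimSpace(out), ""
	}
	prefix := strings.TrimSpace(out[:loc[0]])
	suffix := strings.TrimSpace(out[loc[1]:])
	return strings.TrimSpace(prefix + " " + suffix), out[loc[0]:loc[1]]
}

func main() {
	out := `Sure, checking the weather now. {"name":"get_weather","arguments":{"city":"Rome"}} Done.`
	text, call := splitMixedOutput(out)
	fmt.Println("text:", text) // "Sure, checking the weather now. Done."
	fmt.Println("call:", call) // the JSON function call
}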
@@ -67,9 +67,10 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 			return true
 		})
 
+		textContentToReturn = functions.ParseTextContent(result, config.FunctionsConfig)
+		result = functions.CleanupLLMResult(result, config.FunctionsConfig)
 		results := functions.ParseFunctionCall(result, config.FunctionsConfig)
-		textContentToReturn = functions.ParseTextContent(result, config.FunctionsConfig)
 		log.Debug().Msgf("Text content to return: %s", textContentToReturn)
 		noActionToRun := len(results) > 0 && results[0].Name == noAction || len(results) == 0
 
 		switch {
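An annotation on the hunk above (my reading, not text from the commit): in the streaming path the text content is now taken from the raw result before CleanupLLMResult applies its replacements, and the function call is parsed from the cleaned string. The post-change order is:

// Streaming path after this change: capture the surrounding text first,
// then clean the raw output, then parse the function call(s) from it.
textContentToReturn = functions.ParseTextContent(result, config.FunctionsConfig)
result = functions.CleanupLLMResult(result, config.FunctionsConfig)
results := functions.ParseFunctionCall(result, config.FunctionsConfig)

Presumably this ordering keeps whatever the cleanup rules strip out from affecting the text that is returned to the client.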
@@ -136,7 +137,8 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 			Model: req.Model, // we have to return what the user sent here, due to OpenAI spec.
 			Choices: []schema.Choice{{
 				Delta: &schema.Message{
-					Role: "assistant",
+					Role:    "assistant",
+					Content: &textContentToReturn,
 					ToolCalls: []schema.ToolCall{
 						{
 							Index: i,
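Effect of the hunk above, sketched with hypothetical stand-in types (schema.Message's real definition is not shown in this diff): the streamed delta now carries the recovered text in content alongside the tool call, so an OpenAI-style client sees both pieces in the same chunk.

// Hypothetical stand-in types for illustration; not LocalAI's schema package.
package main

import (
	"encoding/json"
	"fmt"
)

type toolCall struct {
	Index    int            `json:"index"`
	ID       string         `json:"id"`
	Type     string         `json:"type"`
	Function map[string]any `json:"function"`
}

type delta struct {
	Role      string     `json:"role"`
	Content   *string    `json:"content,omitempty"`
	ToolCalls []toolCall `json:"tool_calls,omitempty"`
}

func main() {
	text := "Sure, checking the weather now."
	d := delta{
		Role:    "assistant",
		Content: &text, // new with this commit: the text travels with the tool call
		ToolCalls: []toolCall{{
			Index:    0,
			ID:       "call_0",
			Type:     "function",
			Function: map[string]any{"name": "get_weather", "arguments": `{"city":"Rome"}`},
		}},
	}
	b, _ := json.Marshal(d)
	fmt.Println(string(b))
}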
@@ -477,9 +479,10 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 			return
 		}
 
+		textContentToReturn = functions.ParseTextContent(s, config.FunctionsConfig)
+		s = functions.CleanupLLMResult(s, config.FunctionsConfig)
 		results := functions.ParseFunctionCall(s, config.FunctionsConfig)
-		textContentToReturn = functions.ParseTextContent(s, config.FunctionsConfig)
 		log.Debug().Msgf("Text content to return: %s", textContentToReturn)
 		noActionsToRun := len(results) > 0 && results[0].Name == noActionName || len(results) == 0
 
 		switch {
@@ -507,6 +510,7 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 		if len(input.Tools) > 0 {
 			// If we are using tools, we condense the function calls into
 			// a single response choice with all the tools
+			toolChoice.Message.Content = textContentToReturn
 			toolChoice.Message.ToolCalls = append(toolChoice.Message.ToolCalls,
 				schema.ToolCall{
 					ID: id,
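Annotation on the hunk above (my reading): when the request declares tools, all parsed calls are condensed into a single response choice, and with this change that condensed choice also carries the recovered text content.

// Non-streaming tools branch after this change: the single condensed choice
// now gets the surrounding text as well as the accumulated tool calls.
toolChoice.Message.Content = textContentToReturn
toolChoice.Message.ToolCalls = append(toolChoice.Message.ToolCalls,
	schema.ToolCall{
		ID: id,
		// ... (remaining fields unchanged by this commit)
	})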
@@ -522,7 +526,8 @@ func ChatEndpoint(cl *config.BackendConfigLoader, ml *model.ModelLoader, startup
 		*c = append(*c, schema.Choice{
 			FinishReason: "function_call",
 			Message: &schema.Message{
-				Role: "assistant",
+				Role:    "assistant",
+				Content: &textContentToReturn,
 				FunctionCall: map[string]interface{}{
 					"name":      name,
 					"arguments": args,
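Likewise for the legacy single-function path (my annotation, not part of the commit): the function_call-style choice keeps its FunctionCall payload and now also returns the surrounding text in Content, so clients that do not use tools still receive it.

// Legacy function_call-style choice after this change.
*c = append(*c, schema.Choice{
	FinishReason: "function_call",
	Message: &schema.Message{
		Role:         "assistant",
		Content:      &textContentToReturn, // new: surrounding text is returned too
		FunctionCall: map[string]interface{}{"name": name, "arguments": args},
	},
})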