feat(grpc): return consumed token count and update response accordingly (#2035)

Fixes: #1920
Author: Ettore Di Giacinto
Committed: 2024-04-15 19:47:11 +02:00 (via GitHub)
Parent: de3a1a0a8e
Commit: e843d7df0e
4 changed files with 20 additions and 4 deletions


@@ -160,7 +160,7 @@ func (oais *OpenAIService) GenerateTextFromRequest(request *schema.OpenAIRequest
 	bc, request, err := oais.getConfig(request)
 	if err != nil {
-		log.Error().Msgf("[oais::GenerateTextFromRequest] error getting configuration: %q", err)
+		log.Error().Err(err).Msgf("[oais::GenerateTextFromRequest] error getting configuration")
 		return
 	}
@@ -259,7 +259,7 @@ func (oais *OpenAIService) GenerateTextFromRequest(request *schema.OpenAIRequest
 	// If any of the setup goroutines experienced an error, quit early here.
 	if setupError != nil {
 		go func() {
-			log.Error().Msgf("[OAIS GenerateTextFromRequest] caught an error during setup: %q", setupError)
+			log.Error().Err(setupError).Msgf("[OAIS GenerateTextFromRequest] caught an error during setup")
 			rawFinalResultChannel <- concurrency.ErrorOr[*schema.OpenAIResponse]{Error: setupError}
 			close(rawFinalResultChannel)
 		}()
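
The two hunks above switch the error logging from interpolating the error into the message string to attaching it with Err(). A minimal standalone sketch of that pattern, assuming the log package here is zerolog's global logger (as the Err()/Msgf() chain suggests):

	package main

	import (
		"errors"

		"github.com/rs/zerolog/log"
	)

	func main() {
		err := errors.New("backend unavailable")

		// Old style: the error is flattened into the message text.
		log.Error().Msgf("error getting configuration: %q", err)

		// New style: the error travels as a structured "error" field,
		// keeping the message constant and the error machine-readable.
		log.Error().Err(err).Msg("error getting configuration")
	}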
@@ -603,7 +603,7 @@ func (oais *OpenAIService) GenerateFromMultipleMessagesChatRequest(request *sche
 			Usage: schema.OpenAIUsage{
 				PromptTokens:     rawResult.Value.Usage.Prompt,
 				CompletionTokens: rawResult.Value.Usage.Completion,
-				TotalTokens:      rawResult.Value.Usage.Prompt + rawResult.Value.Usage.Prompt,
+				TotalTokens:      rawResult.Value.Usage.Prompt + rawResult.Value.Usage.Completion,
 			},
 		}
@@ -644,7 +644,7 @@ func (oais *OpenAIService) GenerateFromMultipleMessagesChatRequest(request *sche
 			Usage: schema.OpenAIUsage{
 				PromptTokens:     rawResult.Value.Usage.Prompt,
 				CompletionTokens: rawResult.Value.Usage.Completion,
-				TotalTokens:      rawResult.Value.Usage.Prompt + rawResult.Value.Usage.Prompt,
+				TotalTokens:      rawResult.Value.Usage.Prompt + rawResult.Value.Usage.Completion,
 			},
 		}
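
The last two hunks are the substance of the fix for the consumed-token count: TotalTokens previously added the prompt count to itself, so completion tokens were dropped from the total. A minimal sketch of the corrected aggregation, using the schema.OpenAIUsage field names shown in the diff (the buildUsage helper is illustrative and not part of the codebase):

	// OpenAIUsage mirrors the OpenAI-style usage block returned to clients.
	type OpenAIUsage struct {
		PromptTokens     int `json:"prompt_tokens"`
		CompletionTokens int `json:"completion_tokens"`
		TotalTokens      int `json:"total_tokens"`
	}

	// buildUsage aggregates the backend's per-request token counters.
	func buildUsage(prompt, completion int) OpenAIUsage {
		return OpenAIUsage{
			PromptTokens:     prompt,
			CompletionTokens: completion,
			// The fix: the total is prompt + completion, not prompt + prompt.
			TotalTokens: prompt + completion,
		}
	}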