feat(grpc): return consumed token count and update response accordingly (#2035)
Fixes: #1920
parent de3a1a0a8e
commit e843d7df0e
4 changed files with 20 additions and 4 deletions
@@ -114,6 +114,8 @@ message PredictOptions {
 // The response message containing the result
 message Reply {
   bytes message = 1;
+  int32 tokens = 2;
+  int32 prompt_tokens = 3;
 }

 message ModelOptions {
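The two added fields let a backend report how many tokens the prompt consumed and how many were generated, so the API layer can fill in a usage block instead of leaving the counts at zero (the issue tracked in #1920). Below is a minimal, hypothetical Go sketch of how a backend might populate the extended Reply. The local Reply struct is only a stand-in for the generated protobuf bindings; the field names Message, Tokens and PromptTokens are assumed from standard protoc-gen-go naming and are not taken from the repository itself.

package main

import "fmt"

// Reply mirrors the updated proto message (hypothetical stand-in for the
// generated bindings):
//   bytes message = 1;        -> Message
//   int32 tokens = 2;         -> Tokens (completion tokens generated)
//   int32 prompt_tokens = 3;  -> PromptTokens (tokens consumed by the prompt)
type Reply struct {
	Message      []byte
	Tokens       int32
	PromptTokens int32
}

// buildReply shows how a backend could return the generated text together
// with its token usage, rather than the text alone.
func buildReply(text string, promptTokens, completionTokens int) *Reply {
	return &Reply{
		Message:      []byte(text),
		Tokens:       int32(completionTokens),
		PromptTokens: int32(promptTokens),
	}
}

func main() {
	r := buildReply("hello world", 12, 3)
	fmt.Printf("message=%q prompt_tokens=%d tokens=%d total=%d\n",
		r.Message, r.PromptTokens, r.Tokens, r.PromptTokens+r.Tokens)
}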