Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-29 00:35:00 +00:00
only show token costs if usage exists

commit 166984e94a
parent 93ea45f588
1 changed file with 13 additions and 9 deletions
@@ -606,15 +606,17 @@ class Coder:
             self.io.tool_error(show_content_err)
             raise Exception("No data found in openai response!")
 
-        prompt_tokens = completion.usage.prompt_tokens
-        completion_tokens = completion.usage.completion_tokens
+        tokens = None
+        if hasattr(completion, 'usage'):
+            prompt_tokens = completion.usage.prompt_tokens
+            completion_tokens = completion.usage.completion_tokens
 
-        tokens = f"{prompt_tokens} prompt tokens, {completion_tokens} completion tokens"
-        if self.main_model.prompt_price:
-            cost = prompt_tokens * self.main_model.prompt_price / 1000
-            cost += completion_tokens * self.main_model.completion_price / 1000
-            tokens += f", ${cost:.6f} cost"
-            self.total_cost += cost
+            tokens = f"{prompt_tokens} prompt tokens, {completion_tokens} completion tokens"
+            if self.main_model.prompt_price:
+                cost = prompt_tokens * self.main_model.prompt_price / 1000
+                cost += completion_tokens * self.main_model.completion_price / 1000
+                tokens += f", ${cost:.6f} cost"
+                self.total_cost += cost
 
         show_resp = self.render_incremental_response(True)
         if self.pretty:
@@ -625,7 +627,9 @@ class Coder:
             show_resp = Text(show_resp or "<no response>")
 
         self.io.console.print(show_resp)
-        self.io.tool_output(tokens)
+
+        if tokens is not None:
+            self.io.tool_output(tokens)
 
     def show_send_output_stream(self, completion):
         live = None
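The change guards token and cost reporting behind a usage check: token counts and cost are only computed when the completion response actually carries a usage object, and the summary is only printed when it was built. Below is a minimal standalone sketch of the same pattern, using hypothetical stand-ins (SimpleNamespace objects and hard-coded per-1k-token prices) rather than aider's actual completion or model classes:

from types import SimpleNamespace

# Hypothetical per-1k-token prices; aider reads these from its model settings.
PROMPT_PRICE = 0.0015
COMPLETION_PRICE = 0.002

def describe_usage(completion):
    """Return a token/cost summary string, or None when the response has no usage data."""
    if not hasattr(completion, "usage"):
        return None
    prompt_tokens = completion.usage.prompt_tokens
    completion_tokens = completion.usage.completion_tokens
    cost = (prompt_tokens * PROMPT_PRICE + completion_tokens * COMPLETION_PRICE) / 1000
    return f"{prompt_tokens} prompt tokens, {completion_tokens} completion tokens, ${cost:.6f} cost"

# A response that includes usage, as a typical non-streaming API call would return.
with_usage = SimpleNamespace(usage=SimpleNamespace(prompt_tokens=120, completion_tokens=30))
print(describe_usage(with_usage))  # 120 prompt tokens, 30 completion tokens, $0.000240 cost

# A response without a usage object: report nothing instead of raising AttributeError.
tokens = describe_usage(SimpleNamespace())
if tokens is not None:
    print(tokens)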