Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-24 22:34:59 +00:00
Commit e7aa10a89b (parent d6467a8e30)
Commit message: refac

1 changed file with 34 additions and 39 deletions
@@ -812,7 +812,7 @@ class Coder:
             utils.show_messages(messages, functions=self.functions)

         self.multi_response_content = ""
-        if self.show_pretty():
+        if self.show_pretty() and self.stream:
             mdargs = dict(style=self.assistant_output_color, code_theme=self.code_theme)
             self.mdstream = MarkdownStream(mdargs=mdargs)
         else:
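The extra "and self.stream" condition means a MarkdownStream live renderer is only created when the reply is both pretty-printed and streamed; the later hunks key off whether self.mdstream was set. A minimal sketch of that decision as a helper, assuming the import path and the fallback assignment (neither is shown in this hunk):

from aider.mdstream import MarkdownStream  # assumed import path

def setup_renderer(coder):
    # Illustrative helper, not the repo's code: create a live renderer
    # only when output is both pretty and streamed.
    if coder.show_pretty() and coder.stream:
        mdargs = dict(style=coder.assistant_output_color, code_theme=coder.code_theme)
        coder.mdstream = MarkdownStream(mdargs=mdargs)
    else:
        coder.mdstream = None  # downstream code checks "if self.mdstream:" (see next hunk)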
@@ -851,6 +851,9 @@ class Coder:
             traceback.print_exc()
             return

+        if self.mdstream:
+            self.live_incremental_response(True)
+
         if self.multi_response_content:
             self.multi_response_content += self.partial_response_content
             self.partial_response_content = self.multi_response_content
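These three added lines hand the closing render to the caller: once the streamed reply has been consumed (or the error path above has returned), the fully accumulated response is drawn one final time. A self-contained sketch of that split of responsibilities, with illustrative names rather than the repo's code:

class FakeMarkdownStream:
    # Stand-in for MarkdownStream: just prints what would be rendered.
    def update(self, text, final=False):
        marker = "FINAL" if final else "partial"
        print(f"[{marker}] {text!r}")

def stream_reply(chunks, mdstream):
    # The streaming loop only shows incremental, possibly truncated output.
    content = ""
    for text in chunks:
        content += text
        mdstream.update(content)
    return content

mdstream = FakeMarkdownStream()
reply = stream_reply(["Hello ", "world."], mdstream)
if mdstream:                            # mirrors the added "if self.mdstream:" check
    mdstream.update(reply, final=True)  # the caller does exactly one closing render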
@@ -1158,9 +1161,6 @@ class Coder:
             raise FinishReasonLength()

     def show_send_output_stream(self, completion):
-        finish_reason_length = False
-
-        try:
         for chunk in completion:
             if len(chunk.choices) == 0:
                 continue
@@ -1169,8 +1169,6 @@
                 hasattr(chunk.choices[0], "finish_reason")
                 and chunk.choices[0].finish_reason == "length"
             ):
-                if self.main_model.can_prefill:
-                    finish_reason_length = True
                 raise FinishReasonLength()

             try:
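With the can_prefill gate and the finish_reason_length flag removed, the generator now raises FinishReasonLength whenever the provider reports finish_reason == "length", and it is left to the caller that catches the exception to decide whether to stop or continue with an assistant prefill. A self-contained sketch of that control flow; only the unconditional raise comes from this hunk, the caller-side handling is an assumption:

class FinishReasonLength(Exception):
    """Signals that the model stopped because it hit its output-length limit."""

def stream_chunks(chunks):
    # Stand-in for show_send_output_stream: yield text, raise on a length stop.
    for text, finish_reason in chunks:
        yield text
        if finish_reason == "length":
            raise FinishReasonLength()

def collect(chunks, can_prefill):
    parts = []
    try:
        for text in stream_chunks(chunks):
            parts.append(text)
    except FinishReasonLength:
        # The caller, not the generator, decides what a length stop means.
        if can_prefill:
            parts.append("<would re-send with the partial reply as an assistant prefill>")
        else:
            parts.append("<output truncated>")
    return "".join(parts)

print(collect([("Hello ", None), ("wor", "length")], can_prefill=False))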
@@ -1197,9 +1195,6 @@
                 sys.stdout.write(text)
                 sys.stdout.flush()
                 yield text
-        finally:
-            if self.show_pretty() and not finish_reason_length:
-                self.live_incremental_response(True)

     def live_incremental_response(self, final):
         show_resp = self.render_incremental_response(final)
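Taken together, the last three hunks leave show_send_output_stream as a plain generator: no finish_reason_length bookkeeping, no try/finally, and the closing render handled by the caller as shown in the +851 hunk. A hedged reconstruction of how the method presumably reads after this commit, stitched from the hunks above; the chunk-handling middle and the module-level sys import are assumptions, not part of this diff:

    def show_send_output_stream(self, completion):
        for chunk in completion:
            if len(chunk.choices) == 0:
                continue

            if (
                hasattr(chunk.choices[0], "finish_reason")
                and chunk.choices[0].finish_reason == "length"
            ):
                raise FinishReasonLength()

            try:
                text = chunk.choices[0].delta.content  # assumed accessor for the delta text
                if text:
                    self.partial_response_content += text
            except AttributeError:
                text = None

            if self.show_pretty():
                self.live_incremental_response(False)
            elif text:
                sys.stdout.write(text)
                sys.stdout.flush()
                yield text
        # No finally: block anymore; the caller invokes live_incremental_response(True)
        # once the stream is exhausted (see the hunk at line 851).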