mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-31 09:44:59 +00:00

commit 11020c8aee (parent 304566a914): send output through InputOutput

4 changed files with 34 additions and 23 deletions
@@ -241,8 +241,6 @@ class Coder:
         dry_run=False,
         map_tokens=1024,
         verbose=False,
-        assistant_output_color="blue",
-        code_theme="default",
         stream=True,
         use_git=True,
         cur_messages=None,
@@ -315,8 +313,6 @@ class Coder:
         self.auto_commits = auto_commits
         self.dirty_commits = dirty_commits
-        self.assistant_output_color = assistant_output_color
-        self.code_theme = code_theme

         self.dry_run = dry_run
         self.pretty = self.io.pretty

@@ -1096,11 +1092,7 @@ class Coder:
             utils.show_messages(messages, functions=self.functions)

         self.multi_response_content = ""
-        if self.show_pretty() and self.stream:
-            mdargs = dict(style=self.assistant_output_color, code_theme=self.code_theme)
-            self.mdstream = MarkdownStream(mdargs=mdargs)
-        else:
-            self.mdstream = None
+        self.mdstream = self.io.assistant_output("", self.stream)

         retry_delay = 0.125

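Note: for orientation, here is a minimal caller-side sketch of the contract the hunk above now relies on: assistant_output("", stream=True) hands back a MarkdownStream when pretty output is enabled, or None otherwise, and the caller feeds it the accumulated response text as chunks arrive. The update(text, final=...) calls are an assumption based on aider's MarkdownStream API; the loop itself is illustrative, not aider's actual streaming code.

```python
# Illustrative driver only -- not aider's streaming loop.
from aider.io import InputOutput

io = InputOutput(pretty=True)

# New contract: a MarkdownStream when pretty + stream, otherwise None.
mdstream = io.assistant_output("", stream=True)

partial = ""
for chunk in ["# Title\n", "Streaming ", "*markdown* ", "output.\n"]:
    partial += chunk
    if mdstream:
        # Assumed MarkdownStream API: re-render the accumulated text.
        mdstream.update(partial)

if mdstream:
    # Assumed MarkdownStream API: flush and finalize the live region.
    mdstream.update(partial, final=True)
```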
@@ -1452,14 +1444,7 @@ class Coder:
             raise Exception("No data found in LLM response!")

         show_resp = self.render_incremental_response(True)
-        if self.show_pretty():
-            show_resp = Markdown(
-                show_resp, style=self.assistant_output_color, code_theme=self.code_theme
-            )
-        else:
-            show_resp = Text(show_resp or "<no response>")
-
-        self.io.console.print(show_resp)
+        self.io.assistant_output(show_resp)

         if (
             hasattr(completion.choices[0], "finish_reason")
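Note: the non-streaming side of the same contract, sketched as trimmed-down usage rather than aider's real call site: the Coder now hands the finished response text to InputOutput, which decides between Rich Markdown and plain Text rendering. The constructor arguments used here are the ones this diff adds to InputOutput.

```python
# Illustrative only: the final (non-streaming) render path after this change.
from aider.io import InputOutput

pretty_io = InputOutput(pretty=True, assistant_output_color="blue", code_theme="default")
pretty_io.assistant_output("Final response with **bold** text and a `code span`.")

plain_io = InputOutput(pretty=False)
plain_io.assistant_output("The same call falls back to plain Text when pretty is off.")
```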
@@ -562,8 +562,7 @@ class Commands:
             "HEAD",
         )

-        # don't use io.tool_output() because we don't want to log or further colorize
-        print(diff)
+        self.io.print(diff)

     def quote_fname(self, fname):
         if " " in fname and '"' not in fname:
@@ -1030,9 +1029,9 @@ class Commands:

         if text:
             self.io.add_to_input_history(text)
-            print()
+            self.io.print()
             self.io.user_input(text, log_only=False)
-            print()
+            self.io.print()

         return text

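Note: the point of replacing bare print() with self.io.print() in these two hunks is that every user-visible write now goes through a single object that callers can swap out. A hypothetical subclass (CapturingInputOutput is not part of aider, and only the methods introduced in this diff are assumed) shows the kind of redirection this enables, e.g. for scripting or tests:

```python
# Hypothetical subclass, for illustration only.
from aider.io import InputOutput


class CapturingInputOutput(InputOutput):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.captured = []

    def print(self, message=""):
        # Collect output instead of writing it to stdout.
        self.captured.append(str(message))


io = CapturingInputOutput(pretty=False)
io.print("diff or /run output would be collected here")
print(io.captured)
```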
aider/io.py (27 changed lines)
@@ -18,6 +18,8 @@ from pygments.token import Token
 from rich.console import Console
 from rich.style import Style as RichStyle
 from rich.text import Text
+from rich.markdown import Markdown
+from aider.mdstream import MarkdownStream

 from .dump import dump  # noqa: F401
 from .utils import is_image_file
@@ -177,6 +179,8 @@ class InputOutput:
         tool_output_color=None,
         tool_error_color="red",
         tool_warning_color="#FFA500",
+        assistant_output_color="blue",
+        code_theme="default",
         encoding="utf-8",
         dry_run=False,
         llm_history_file=None,
@@ -191,6 +195,8 @@ class InputOutput:
         self.tool_output_color = tool_output_color if pretty else None
         self.tool_error_color = tool_error_color if pretty else None
         self.tool_warning_color = tool_warning_color if pretty else None
+        self.assistant_output_color = assistant_output_color
+        self.code_theme = code_theme

         self.input = input
         self.output = output
@@ -563,6 +569,27 @@ class InputOutput:
             style = RichStyle(**style)
         self.console.print(*messages, style=style)

+    def assistant_output(self, message, stream=False):
+        mdStream = None
+        show_resp = message
+
+        if self.pretty:
+            if stream:
+                mdargs = dict(style=self.assistant_output_color, code_theme=self.code_theme)
+                mdStream = MarkdownStream(mdargs=mdargs)
+            else:
+                show_resp = Markdown(
+                    message, style=self.assistant_output_color, code_theme=self.code_theme
+                )
+        else:
+            show_resp = Text(message or "<no response>")
+
+        self.console.print(show_resp)
+        return mdStream
+
+    def print(self, message=""):
+        print(message)
+
     def append_chat_history(self, text, linebreak=False, blockquote=False, strip=True):
         if blockquote:
             if strip:
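Note: a short sketch of the return contract of the assistant_output() hook added above, derived directly from its branches: only the pretty-and-streaming combination yields a MarkdownStream handle; every other combination renders immediately and returns None. The final update(..., final=True) cleanup call is an assumption about aider's MarkdownStream API.

```python
# Exercises the four pretty/stream combinations of the new hook.
from aider.io import InputOutput

for pretty in (True, False):
    for stream in (True, False):
        io = InputOutput(pretty=pretty)
        handle = io.assistant_output("sample *markdown* text", stream=stream)
        print(f"pretty={pretty!s:5} stream={stream!s:5} -> {type(handle).__name__}")
        if handle is not None:
            # Assumed MarkdownStream API: finalize so the live view is closed.
            handle.update("sample *markdown* text", final=True)
```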
@@ -396,6 +396,8 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
         user_input_color=args.user_input_color,
         tool_output_color=args.tool_output_color,
         tool_error_color=args.tool_error_color,
+        assistant_output_color=args.assistant_output_color,
+        code_theme=args.code_theme,
         dry_run=args.dry_run,
         encoding=args.encoding,
         llm_history_file=args.llm_history_file,
@@ -577,8 +579,6 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
         dry_run=args.dry_run,
         map_tokens=args.map_tokens,
         verbose=args.verbose,
-        assistant_output_color=args.assistant_output_color,
-        code_theme=args.code_theme,
         stream=args.stream,
         use_git=args.git,
         restore_chat_history=args.restore_chat_history,
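Note: taken together, the two main() hunks move the rendering configuration onto InputOutput and stop forwarding it to Coder. A trimmed, hypothetical version of that wiring is sketched below; the Namespace stands in for aider's parsed CLI args, and only the arguments relevant to this diff are shown.

```python
# Hypothetical, trimmed wiring: rendering options now configure InputOutput,
# and are no longer forwarded to Coder.create().
from argparse import Namespace

from aider.io import InputOutput

args = Namespace(assistant_output_color="blue", code_theme="default")

io = InputOutput(
    assistant_output_color=args.assistant_output_color,
    code_theme=args.code_theme,
)
io.assistant_output("Assistant styling is configured once, on the io object.")
```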