Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-29 00:35:00 +00:00)

commit 6c87d5be1c (parent 2691c80ac2)

    better

5 changed files with 33 additions and 8 deletions
@@ -419,7 +419,7 @@ class Coder:
         ]
 
         main_sys = self.gpt_prompts.main_system
-        if self.main_model.max_context_tokens > 4 * 1024:
-            main_sys += "\n" + self.fmt_system_reminder()
+        # if self.main_model.max_context_tokens > 4 * 1024:
+        main_sys += "\n" + self.fmt_system_reminder()
 
         messages = [
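Not part of the commit: a minimal, self-contained sketch of what this toggle changes. Before, the reminder was only appended for models with more than a 4k-token context; after, it is appended unconditionally. The function and parameter names below are hypothetical.

# Hypothetical sketch of the prompt assembly around the hunk above.
def build_main_sys(main_system, reminder, max_context_tokens, always_remind=True):
    main_sys = main_system
    # Before this commit: gated on max_context_tokens > 4 * 1024.
    # After this commit: the gate is commented out, so the reminder is always added.
    if always_remind or max_context_tokens > 4 * 1024:
        main_sys += "\n" + reminder
    return main_sys

print(build_main_sys("You are a coding assistant.", "Remember the edit format.", 4096))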
@@ -481,10 +481,13 @@ class Coder:
         self.update_cur_messages(content, edited)
 
         if edited:
-            if self.auto_commits and not self.dry_run:
+            if self.repo and self.auto_commits and not self.dry_run:
                 saved_message = self.auto_commit()
+            elif hasattr(self.gpt_prompts, "files_content_gpt_edits_no_repo"):
+                saved_message = self.gpt_prompts.files_content_gpt_edits_no_repo
             else:
                 saved_message = None
 
             self.move_back_cur_messages(saved_message)
 
         add_rel_files_message = self.check_for_file_mentions(content)
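Not part of the commit: a sketch of the fallback this hunk adds. With no git repo attached, auto_commit() cannot run, so the coder falls back to the new files_content_gpt_edits_no_repo prompt string. The helper name is hypothetical and `coder` stands in for a Coder instance.

# Hypothetical helper mirroring the branch order introduced above.
def pick_saved_message(coder, edited):
    if not edited:
        return None
    if coder.repo and coder.auto_commits and not coder.dry_run:
        return coder.auto_commit()
    if hasattr(coder.gpt_prompts, "files_content_gpt_edits_no_repo"):
        return coder.gpt_prompts.files_content_gpt_edits_no_repo
    return None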
@@ -1,6 +1,8 @@
 class CoderPrompts:
     files_content_gpt_edits = "I committed the changes with git hash {hash} & commit msg: {message}"
 
+    files_content_gpt_edits_no_repo = "I updated the files."
+
     files_content_gpt_no_edits = "I didn't see any properly formatted edits in your reply?!"
 
     files_content_local_edits = "I edited the files myself."
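Not part of the commit: these prompt strings are plain str.format templates, so a caller might fill them roughly as below; the actual call site is not shown in this diff, and only the two attributes from the hunk above are reproduced.

class CoderPrompts:
    files_content_gpt_edits = "I committed the changes with git hash {hash} & commit msg: {message}"
    files_content_gpt_edits_no_repo = "I updated the files."

prompts = CoderPrompts()
print(prompts.files_content_gpt_edits.format(hash="6c87d5be1c", message="better"))
print(prompts.files_content_gpt_edits_no_repo)  # no placeholders; used when there is no repo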
@@ -64,6 +64,20 @@ class EditBlockFunctionCoder(Coder):
         self.gpt_prompts = EditBlockFunctionPrompts()
         super().__init__(*args, **kwargs)
 
+    def update_cur_messages(self, content, edited):
+        if self.partial_response_content:
+            self.cur_messages += [dict(role="assistant", content=self.partial_response_content)]
+        if self.partial_response_function_call:
+            self.cur_messages += [
+                dict(
+                    role="assistant",
+                    content=None,
+                    function_call=self.partial_response_function_call,
+                )
+            ]
+
+        dump(self.cur_messages)
+
     def render_incremental_response(self, final=False):
         if self.partial_response_content:
             return self.partial_response_content
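Not part of the commit: the two message shapes that update_cur_messages records follow the OpenAI chat-completions convention for function calling, where an assistant reply carries content=None plus a separate function_call payload (name and a JSON string of arguments). The function name and arguments below are invented for illustration.

# Illustrative message dicts only; the real function name and arguments come from the model.
text_reply = dict(role="assistant", content="Here are the edits you asked for.")
function_reply = dict(
    role="assistant",
    content=None,
    function_call=dict(name="replace_lines", arguments='{"edits": []}'),
)

cur_messages = []
cur_messages += [text_reply]
cur_messages += [function_reply]
print(cur_messages)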
@@ -87,8 +101,8 @@ class EditBlockFunctionCoder(Coder):
         edited = set()
         for edit in edits:
             path = get_arg(edit, "path")
-            original = get_arg(edit, "original_lines")
-            updated = get_arg(edit, "updated_lines")
+            original = "\n".join(get_arg(edit, "original_lines")) + "\n"
+            updated = "\n".join(get_arg(edit, "updated_lines")) + "\n"
 
             full_path = self.allowed_to_edit(path)
             if not full_path:
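Not part of the commit: the function-call arguments deliver original_lines and updated_lines as lists of strings, so this hunk joins them back into newline-terminated text before the search-and-replace. A tiny sketch of that conversion, with a hypothetical helper name:

def lines_to_text(lines):
    # Join a list of lines into a single newline-terminated string, as the hunk above does inline.
    return "\n".join(lines) + "\n"

assert lines_to_text(["def foo():", "    return 1"]) == "def foo():\n    return 1\n"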
@@ -23,3 +23,5 @@ NEVER return code outside the `write_file` function.
 
     # TODO: should this be present for using this with gpt-4?
     repo_content_prefix = None
+
+    # TODO: fix the chat history, except we can't keep the whole file
@@ -25,6 +25,10 @@ def show_messages(messages, title=None):
 
     for msg in messages:
         role = msg["role"].upper()
-        content = msg["content"].splitlines()
-        for line in content:
-            print(role, line)
+        content = msg.get("content")
+        if content:
+            for line in content.splitlines():
+                print(role, line)
+        content = msg.get("function_call")
+        if content:
+            print(role, content)
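Not part of the commit: a standalone sketch of the fixed show_messages behavior. Function-calling replies have content=None, which previously crashed on .splitlines(); now None content is skipped and any function_call payload is printed instead. The sample messages are invented and the title handling is simplified.

def show_messages(messages, title=None):
    if title:
        print(title.upper())
    for msg in messages:
        role = msg["role"].upper()
        content = msg.get("content")
        if content:
            for line in content.splitlines():
                print(role, line)
        content = msg.get("function_call")
        if content:
            print(role, content)

show_messages(
    [
        dict(role="user", content="please update foo.py"),
        dict(role="assistant", content=None, function_call=dict(name="write_file", arguments="{}")),
    ],
    title="chat",
)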