Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-25 14:55:00 +00:00
When autocompleting /add and /drop quote the filename if needed #440
commit 06973d85a3
parent 23cc78f36b
1 changed file with 31 additions and 10 deletions
@@ -7,10 +7,10 @@ import git
 from prompt_toolkit.completion import Completion
 
 from aider import prompts, voice
+from aider.utils import is_gpt4_with_openai_base_url, is_image_file
 
 from .dump import dump  # noqa: F401
-from aider.utils import is_image_file, is_gpt4_with_openai_base_url
 
 
 class Commands:
     voice = None
@@ -174,7 +174,10 @@ class Commands:
         # only switch to image model token count if gpt4 and openai and image in files
         image_in_chat = False
         if is_gpt4_with_openai_base_url(self.coder.main_model.name, self.coder.client):
-            image_in_chat = any(is_image_file(relative_fname) for relative_fname in self.coder.get_inchat_relative_files())
+            image_in_chat = any(
+                is_image_file(relative_fname)
+                for relative_fname in self.coder.get_inchat_relative_files()
+            )
         limit = 128000 if image_in_chat else self.coder.main_model.max_context_tokens
 
         remaining = limit - total
@@ -196,14 +199,16 @@ class Commands:
             return
 
         last_commit = self.coder.repo.repo.head.commit
-        changed_files_last_commit = {item.a_path for item in last_commit.diff(last_commit.parents[0])}
+        changed_files_last_commit = {
+            item.a_path for item in last_commit.diff(last_commit.parents[0])
+        }
         dirty_files = [item.a_path for item in self.coder.repo.repo.index.diff(None)]
         dirty_files_in_last_commit = changed_files_last_commit.intersection(dirty_files)
 
         if dirty_files_in_last_commit:
             self.io.tool_error(
-                "The repository has uncommitted changes in files that were modified in the last commit. "
-                "Please commit or stash them before undoing."
+                "The repository has uncommitted changes in files that were modified in the last"
+                " commit. Please commit or stash them before undoing."
             )
             return
 
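For context, the dirty-file guard above relies on GitPython: commit.diff(parents[0]) lists the files touched by the last commit, while index.diff(None) lists unstaged working-tree changes. A minimal standalone sketch of the same check, assuming the current directory is a git repository:

import git

repo = git.Repo(".")
last_commit = repo.head.commit

# Files touched by the last commit (diff against its first parent).
changed_files_last_commit = {
    item.a_path for item in last_commit.diff(last_commit.parents[0])
}

# Files with unstaged modifications in the working tree.
dirty_files = [item.a_path for item in repo.index.diff(None)]

# Undo should be refused when any file changed in the last commit is also dirty.
dirty_files_in_last_commit = changed_files_last_commit.intersection(dirty_files)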
@@ -265,12 +270,17 @@ class Commands:
         # don't use io.tool_output() because we don't want to log or further colorize
         print(diff)
 
+    def quote_fname(self, fname):
+        if " " in fname and '"' not in fname:
+            fname = f'"{fname}"'
+        return fname
+
     def completions_add(self, partial):
         files = set(self.coder.get_all_relative_files())
         files = files - set(self.coder.get_inchat_relative_files())
         for fname in files:
             if partial.lower() in fname.lower():
-                yield Completion(fname, start_position=-len(partial))
+                yield Completion(self.quote_fname(fname), start_position=-len(partial))
 
     def glob_filtered_to_repo(self, pattern):
         try:
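For reference, the new quote_fname helper only wraps a filename in double quotes when it contains a space and no embedded quote, so the /add and /drop completions insert a quoted path where needed. A small standalone sketch of the rule (the sample filenames are illustrative):

def quote_fname(fname):
    # Quote only when the name has a space and no double quote already.
    if " " in fname and '"' not in fname:
        fname = f'"{fname}"'
    return fname

print(quote_fname("notes file.txt"))  # -> "notes file.txt" (quoted)
print(quote_fname("README.md"))       # -> README.md (unchanged)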
@@ -333,8 +343,13 @@ class Commands:
             if abs_file_path in self.coder.abs_fnames:
                 self.io.tool_error(f"{matched_file} is already in the chat")
             else:
-                if is_image_file(matched_file) and not is_gpt4_with_openai_base_url(self.coder.main_model.name, self.coder.client):
-                    self.io.tool_error(f"Cannot add image file {matched_file} as the model does not support image files")
+                if is_image_file(matched_file) and not is_gpt4_with_openai_base_url(
+                    self.coder.main_model.name, self.coder.client
+                ):
+                    self.io.tool_error(
+                        f"Cannot add image file {matched_file} as the model does not support image"
+                        " files"
+                    )
                     continue
                 content = self.io.read_text(abs_file_path)
                 if content is None:
@@ -359,7 +374,7 @@ class Commands:
 
         for fname in files:
             if partial.lower() in fname.lower():
-                yield Completion(fname, start_position=-len(partial))
+                yield Completion(self.quote_fname(fname), start_position=-len(partial))
 
     def cmd_drop(self, args):
         "Remove files from the chat session to free up context space"
@@ -409,7 +424,13 @@ class Commands:
         combined_output = None
         try:
             result = subprocess.run(
-                args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, shell=True, encoding=self.io.encoding, errors='replace'
+                args,
+                stdout=subprocess.PIPE,
+                stderr=subprocess.STDOUT,
+                text=True,
+                shell=True,
+                encoding=self.io.encoding,
+                errors="replace",
             )
             combined_output = result.stdout
         except Exception as e:
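The reflowed subprocess.run call behaves as before: the command runs through the shell, stderr is merged into stdout, and output is decoded with the io encoding, with errors="replace" so undecodable bytes become replacement characters rather than raising. A rough standalone equivalent (the command string and encoding are placeholders):

import subprocess

result = subprocess.run(
    "ls -la",                     # placeholder command; shell=True passes it to the shell
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,     # merge stderr into stdout
    text=True,
    shell=True,
    encoding="utf-8",             # stands in for self.io.encoding
    errors="replace",             # undecodable bytes become U+FFFD instead of raising
)
combined_output = result.stdout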