fix: Update Ollama context window handling and input confirmation logic

Paul Gauthier 2025-01-28 18:44:34 -08:00 committed by Paul Gauthier (aider)
parent 298f713e9b
commit 7aa6a30169
2 changed files with 10 additions and 9 deletions

@@ -1246,7 +1246,7 @@ class Coder:
             self.io.tool_output("- Use /drop to remove unneeded files from the chat")
             self.io.tool_output("- Use /clear to clear the chat history")
             self.io.tool_output("- Break your code into smaller files")
-            proceed = "y"
+            proceed = "Y"
             self.io.tool_output(
                 "It's probably safe to try and send the request, most providers won't charge if"
                 " the context limit is exceeded."
@@ -1255,14 +1255,14 @@ class Coder:
         # Special warning for Ollama models about context window size
         if self.main_model.name.startswith(("ollama/", "ollama_chat/")):
             extra_params = getattr(self.main_model, "extra_params", None) or {}
-            num_ctx = extra_params.get("num_ctx", 8192)
-            if max_input_tokens and max_input_tokens > num_ctx:
-                self.io.tool_waning(
-                    f"Your Ollama model is configured with num_ctx={num_ctx} tokens of"
-                    " context window\nSee"
-                    " https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size"
-                    " for help configuring larger context windows."
-                )
+            num_ctx = extra_params.get("num_ctx", 2048)
+            if input_tokens > num_ctx:
+                proceed = "N"
+                self.io.tool_warning(f"""
+Your Ollama model is configured with num_ctx={num_ctx} tokens of context window.
+You are attempting to send {input_tokens} tokens.
+See https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size
+""".strip())  # noqa

         if proceed and not self.io.confirm_ask("Try to proceed anyway?", default=proceed):
             return False
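
Taken together, the new branch falls back to Ollama's usual 2048-token default, compares the actual prompt size against it, and flips the confirmation default to "N" when the prompt will not fit. A minimal standalone sketch of that logic, with hypothetical values standing in for the model's extra_params and the counted prompt tokens:

# Standalone sketch of the new Ollama check; the values here are hypothetical.
extra_params = {}      # whatever the model's extra_params supply (empty here)
input_tokens = 3500    # tokens counted for the outgoing request

num_ctx = extra_params.get("num_ctx", 2048)  # fall back to Ollama's usual default
if input_tokens > num_ctx:
    proceed = "N"  # confirmation now defaults to "no" when the prompt won't fit
    print(
        f"Your Ollama model is configured with num_ctx={num_ctx} tokens of context window.\n"
        f"You are attempting to send {input_tokens} tokens.\n"
        "See https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size"
    )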

@@ -732,6 +732,7 @@ class InputOutput:
                     question,
                     style=style,
                     complete_while_typing=False,
+                    default=default,
                 )
             else:
                 res = input(question)
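
The io.py change threads the caller's default into the prompt_toolkit prompt, which pre-fills the answer in the input buffer so pressing Enter accepts it. A minimal sketch of that behavior using prompt_toolkit directly, assuming it is installed; the question text is illustrative rather than aider's exact prompt:

# Sketch: default= pre-fills the buffer, so Enter returns "N" unless the user
# edits the text first.
from prompt_toolkit import PromptSession

session = PromptSession()
answer = session.prompt("Try to proceed anyway? ", default="N")
print("answered:", answer)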