Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-30 17:24:59 +00:00
style: fix line wrapping in Ollama context window message
This commit is contained in:
parent 37cbe6c488
commit 02e8158918
1 changed file with 2 additions and 2 deletions
@@ -1250,8 +1250,8 @@ class Coder:
             num_ctx = self.main_model.extra_params.get("num_ctx")
             if num_ctx:
                 self.io.tool_error(
-                    f"\nNote: Your Ollama model is configured with num_ctx={num_ctx}."
-                    f" See https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size"
+                    f"\nNote: Your Ollama model is configured with num_ctx={num_ctx}. See"
+                    " https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size"
                     " for help configuring larger context windows."
                 )
 
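For context, the rewrapped message relies on Python's implicit concatenation of adjacent string literals inside the tool_error() call, so the three fragments render as one sentence at runtime. A minimal standalone sketch of that behavior (the num_ctx value of 2048 is illustrative, not taken from the commit):

num_ctx = 2048  # illustrative value, not from the commit
message = (
    f"\nNote: Your Ollama model is configured with num_ctx={num_ctx}. See"
    " https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size"
    " for help configuring larger context windows."
)
print(message)
# Note: Your Ollama model is configured with num_ctx=2048. See https://aider.chat/docs/llms/ollama.html#setting-the-context-window-size for help configuring larger context windows.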