From 21e96df85a0bd4a4f6abf3e18d34f2063bef20d6 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Thu, 6 Feb 2025 14:56:58 -0800
Subject: [PATCH] copy

---
 HISTORY.md                                  |  2 +-
 aider/website/HISTORY.md                    |  2 +-
 aider/website/_data/blame.yml               | 69 +++++++++++++++++++++
 aider/website/assets/sample-analytics.jsonl |  6 +-
 aider/website/docs/llms/ollama.md           | 13 ++--
 5 files changed, 81 insertions(+), 11 deletions(-)

diff --git a/HISTORY.md b/HISTORY.md
index 01910edbc..12fe30177 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -3,7 +3,7 @@
 ### Aider v0.74.0
 
 - Dynamically changes the Ollama context window to hold the current chat.
-- Better support for o3-mini, DeepSeek V3 & R1, o1-mini, o1 via secondary API providers.
+- Better support for o3-mini, DeepSeek V3 & R1, o1-mini, o1 especially via third-party API providers.
 - Remove `<think>` tags from R1 responses for commit messages (and other weak model uses).
 - Can now specify `use_temperature: <float>` in model settings, not just true/false.
 - The full docker container now includes `boto3` for Bedrock.
diff --git a/aider/website/HISTORY.md b/aider/website/HISTORY.md
index 660e1577b..1d43f67dd 100644
--- a/aider/website/HISTORY.md
+++ b/aider/website/HISTORY.md
@@ -26,7 +26,7 @@ cog.out(text)
 ### Aider v0.74.0
 
 - Dynamically changes the Ollama context window to hold the current chat.
-- Better support for o3-mini, DeepSeek V3 & R1, o1-mini, o1 via secondary API providers.
+- Better support for o3-mini, DeepSeek V3 & R1, o1-mini, o1 especially via third-party API providers.
 - Remove `<think>` tags from R1 responses for commit messages (and other weak model uses).
 - Can now specify `use_temperature: <float>` in model settings, not just true/false.
 - The full docker container now includes `boto3` for Bedrock.
diff --git a/aider/website/_data/blame.yml b/aider/website/_data/blame.yml
index bdebdc6d9..019b0829e 100644
--- a/aider/website/_data/blame.yml
+++ b/aider/website/_data/blame.yml
@@ -3739,3 +3739,72 @@
     xqyz: 1
   start_tag: v0.72.0
   total_lines: 409
+- aider_percentage: 77.14
+  aider_total: 604
+  end_date: '2025-02-06'
+  end_tag: v0.74.0
+  file_counts:
+    aider/__init__.py:
+      Paul Gauthier: 1
+    aider/args.py:
+      Paul Gauthier: 1
+    aider/coders/base_coder.py:
+      Paul Gauthier: 24
+      Paul Gauthier (aider): 9
+    aider/coders/editblock_coder.py:
+      Paul Gauthier: 5
+    aider/coders/wholefile_coder.py:
+      Paul Gauthier: 2
+    aider/commands.py:
+      Paul Gauthier: 1
+    aider/exceptions.py:
+      Paul Gauthier: 4
+      Paul Gauthier (aider): 6
+    aider/history.py:
+      Paul Gauthier (aider): 1
+    aider/io.py:
+      Paul Gauthier: 4
+      Paul Gauthier (aider): 18
+    aider/llm.py:
+      Paul Gauthier: 3
+    aider/main.py:
+      Paul Gauthier: 21
+      Paul Gauthier (aider): 25
+    aider/models.py:
+      Paul Gauthier: 83
+      Paul Gauthier (aider): 77
+    aider/repo.py:
+      Paul Gauthier: 1
+      Paul Gauthier (aider): 2
+      "Viktor Sz\xE9pe": 3
+    aider/watch.py:
+      Paul Gauthier (aider): 45
+    benchmark/docker.sh:
+      Paul Gauthier: 2
+    docker/Dockerfile:
+      Paul Gauthier: 5
+      Paul Gauthier (aider): 4
+    tests/basic/test_editblock.py:
+      Paul Gauthier: 7
+    tests/basic/test_history.py:
+      Paul Gauthier (aider): 13
+    tests/basic/test_io.py:
+      Paul Gauthier (aider): 46
+    tests/basic/test_main.py:
+      Paul Gauthier: 8
+      Paul Gauthier (aider): 1
+    tests/basic/test_models.py:
+      Paul Gauthier (aider): 297
+    tests/basic/test_repo.py:
+      Paul Gauthier (aider): 11
+    tests/basic/test_sendchat.py:
+      Paul Gauthier (aider): 7
+    tests/basic/test_watch.py:
+      Paul Gauthier: 4
+      Paul Gauthier (aider): 42
+  grand_total:
+    Paul Gauthier: 176
+    Paul Gauthier (aider): 604
+    "Viktor Sz\xE9pe": 3
+  start_tag: v0.73.0
+  total_lines: 783
diff --git a/aider/website/assets/sample-analytics.jsonl b/aider/website/assets/sample-analytics.jsonl
index 056c949d6..a539a3afa 100644
--- a/aider/website/assets/sample-analytics.jsonl
+++ b/aider/website/assets/sample-analytics.jsonl
@@ -1,6 +1,3 @@
-{"event": "ai-comments execute", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
-{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
-{"event": "ai-comments execute", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
 {"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
 {"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
 {"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738703041}
@@ -998,3 +995,6 @@
 {"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738880524}
 {"event": "message_send_exception", "properties": {"exception": "[SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed: self-signed certificate (_ssl.c:1000)"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738880525}
 {"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738880525}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738881386}
+{"event": "gui session", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738881386}
+{"event": "exit", "properties": {"reason": "GUI session ended"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1738881386}
diff --git a/aider/website/docs/llms/ollama.md b/aider/website/docs/llms/ollama.md
index 771b3022c..014baa175 100644
--- a/aider/website/docs/llms/ollama.md
+++ b/aider/website/docs/llms/ollama.md
@@ -44,15 +44,16 @@ setx OLLAMA_API_KEY <api-key> # Windows, restart shell after setx
 
 [Ollama uses a 2k context window by default](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-can-i-specify-the-context-window-size),
 which is very small for working with aider.
-
+It also **silently** discards context that exceeds the window.
+This is especially dangerous because many users don't even realize that most of their data
+is being discarded by Ollama.
+
 By default, aider sets Ollama's context window
 to be large enough for each request you send plus 8k tokens for the reply.
+This ensures data isn't silently discarded by Ollama.
 
-Larger context windows may be helpful to allow larger replies from the LLM
-but will use memory and increase latency.
-If you would like
-a larger context window
-you can use a
+If you'd like you can configure a fixed sized context window instead
+with an
 [`.aider.model.settings.yml` file](https://aider.chat/docs/config/adv-model-settings.html#model-settings)
 like this:
 
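
The final hunk above ends at "like this:", just before the example settings
file in ollama.md. For reference, aider pins a fixed Ollama context window by
passing Ollama's num_ctx option through a model's extra_params in that file.
The sketch below shows the general shape; the model name, the temperature,
and the 8192-token window are illustrative assumptions, not values taken from
this patch. It also exercises the `use_temperature: <float>` form noted in
the v0.74.0 changelog entries above.

    # .aider.model.settings.yml (sketch; all values are assumptions)
    - name: ollama/llama3:70b    # substitute the Ollama model you actually run
      use_temperature: 0.6       # v0.74.0 accepts a float here, not just true/false
      extra_params:
        num_ctx: 8192            # fixed context window, in tokens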