diff --git a/HISTORY.md b/HISTORY.md
index 717afde99..770450026 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,6 +1,7 @@
 # Release history
 
 ### main branch
+
 - Prompts to help DeepSeek work better when alternating between `/ask` and `/code`.
 - Streaming pretty LLM responses is smoother and faster for long replies.
 - Increased max chat history tokens limit from 4k to 8k.
@@ -18,6 +19,7 @@
 - Aider wrote 59% of the code in this release.
 
 ### Aider v0.70.0
+
 - Full support for o1 models.
 - Watch files now honors `--subtree-only`, and only watches that subtree.
 - Improved prompting for watch files, to work more reliably with more models.
diff --git a/aider/website/HISTORY.md b/aider/website/HISTORY.md
index 96278df7a..41cb24b05 100644
--- a/aider/website/HISTORY.md
+++ b/aider/website/HISTORY.md
@@ -24,6 +24,7 @@ cog.out(text)
 
 ### main branch
+
 - Prompts to help DeepSeek work better when alternating between `/ask` and `/code`.
 - Streaming pretty LLM responses is smoother and faster for long replies.
 - Increased max chat history tokens limit from 4k to 8k.
@@ -41,6 +42,7 @@ cog.out(text)
 - Aider wrote 59% of the code in this release.
 
 ### Aider v0.70.0
+
 - Full support for o1 models.
 - Watch files now honors `--subtree-only`, and only watches that subtree.
 - Improved prompting for watch files, to work more reliably with more models.
diff --git a/aider/website/assets/sample-analytics.jsonl b/aider/website/assets/sample-analytics.jsonl
index 67d172559..070ea9e0c 100644
--- a/aider/website/assets/sample-analytics.jsonl
+++ b/aider/website/assets/sample-analytics.jsonl
@@ -1,6 +1,3 @@
-{"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600170}
-{"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600170}
-{"event": "message_send", "properties": {"main_model": "deepseek/deepseek-chat", "weak_model": "gemini/gemini-2.0-flash-exp", "editor_model": "deepseek/deepseek-chat", "edit_format": "ask", "prompt_tokens": 6949, "completion_tokens": 328, "total_tokens": 7277, "cost": 0.0010647, "total_cost": 0.02245922000000001}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600177}
 {"event": "command_ask", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600197}
 {"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600197}
 {"event": "message_send", "properties": {"main_model": "deepseek/deepseek-chat", "weak_model": "gemini/gemini-2.0-flash-exp", "editor_model": "deepseek/deepseek-chat", "edit_format": "ask", "prompt_tokens": 7251, "completion_tokens": 404, "total_tokens": 7655, "cost": 0.00112826, "total_cost": 0.023587480000000008}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1735600206}
@@ -998,3 +995,6 @@
 {"event": "message_send_starting", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736452825}
 {"event": "message_send", "properties": {"main_model": "deepseek/deepseek-chat", "weak_model": "deepseek/deepseek-chat", "editor_model": "deepseek/deepseek-chat", "edit_format": "diff", "prompt_tokens": 22997, "completion_tokens": 237, "total_tokens": 23234, "cost": 0.0032859399999980404, "total_cost": 0.0032859399999980404}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736452837}
 {"event": "exit", "properties": {"reason": "Completed --message"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736452837}
+{"event": "launched", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736453047}
+{"event": "gui session", "properties": {}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736453047}
+{"event": "exit", "properties": {"reason": "GUI session ended"}, "user_id": "c42c4e6b-f054-44d7-ae1f-6726cc41da88", "time": 1736453047}
diff --git a/aider/website/docs/faq.md b/aider/website/docs/faq.md
index 3029e53b4..e5043832c 100644
--- a/aider/website/docs/faq.md
+++ b/aider/website/docs/faq.md
@@ -237,9 +237,9 @@ tr:hover { background-color: #f5f5f5; }
 Model Name | Total Tokens | Percent |
 ---|---|---|
-deepseek/deepseek-chat | 1,097,898 | 79.9% |
-deepseek/deepseek-coder | 230,595 | 16.8% |
-o1 | 32,137 | 2.3% |
+deepseek/deepseek-chat | 1,090,621 | 79.8% |
+deepseek/deepseek-coder | 230,595 | 16.9% |
+o1 | 32,137 | 2.4% |
 gemini/gemini-1.5-flash-8b | 8,297 | 0.6% |
 gemini/gemini-1.5-flash-002 | 4,964 | 0.4% |