diff --git a/aider/args.py b/aider/args.py
index 567160abe..6ac071491 100644
--- a/aider/args.py
+++ b/aider/args.py
@@ -237,8 +237,8 @@ def get_parser(default_config_files, git_root):
         type=int,
         default=None,
         help=(
-            "Maximum number of tokens to use for chat history. If not specified, uses the model's"
-            " max_chat_history_tokens."
+            "Soft limit on tokens for chat history, after which summarization begins."
+            " If unspecified, defaults to the model's max_chat_history_tokens."
         ),
     )
     # This is a duplicate of the argument in the preparser and is a no-op by this time of
@@ -719,7 +719,6 @@ def get_md_help():
     parser.formatter_class = MarkdownHelpFormatter
 
     return argparse.ArgumentParser.format_help(parser)
-    return parser.format_help()
 
 
 def get_sample_yaml():
@@ -733,7 +732,6 @@ def get_sample_yaml():
     parser.formatter_class = YamlHelpFormatter
 
     return argparse.ArgumentParser.format_help(parser)
-    return parser.format_help()
 
 
 def get_sample_dotenv():
@@ -747,7 +745,6 @@ def get_sample_dotenv():
     parser.formatter_class = DotEnvFormatter
 
     return argparse.ArgumentParser.format_help(parser)
-    return parser.format_help()
 
 
 def main():
diff --git a/aider/models.py b/aider/models.py
index 4e7e529d3..f94d83b44 100644
--- a/aider/models.py
+++ b/aider/models.py
@@ -319,7 +319,7 @@ MODEL_SETTINGS = [
     ModelSettings(
         "openrouter/anthropic/claude-3.5-sonnet",
         "diff",
-        weak_model_name="openrouter/anthropic/claude-3-haiku-20240307",
+        weak_model_name="openrouter/anthropic/claude-3-haiku",
         editor_model_name="openrouter/anthropic/claude-3.5-sonnet",
         editor_edit_format="editor-diff",
         use_repo_map=True,
@@ -334,7 +334,7 @@ MODEL_SETTINGS = [
     ModelSettings(
         "openrouter/anthropic/claude-3.5-sonnet:beta",
         "diff",
-        weak_model_name="openrouter/anthropic/claude-3-haiku-20240307",
+        weak_model_name="openrouter/anthropic/claude-3-haiku:beta",
         editor_model_name="openrouter/anthropic/claude-3.5-sonnet:beta",
         editor_edit_format="editor-diff",
         use_repo_map=True,
@@ -513,6 +513,18 @@ MODEL_SETTINGS = [
         use_temperature=False,
         streaming=False,
     ),
+    ModelSettings(
+        "azure/o1-mini",
+        "whole",
+        weak_model_name="azure/gpt-4o-mini",
+        editor_model_name="azure/gpt-4o",
+        editor_edit_format="editor-diff",
+        use_repo_map=True,
+        reminder="user",
+        use_system_prompt=False,
+        use_temperature=False,
+        streaming=False,
+    ),
     ModelSettings(
         "o1-mini",
         "whole",
@@ -537,6 +549,18 @@ MODEL_SETTINGS = [
         use_temperature=False,
         streaming=False,
     ),
+    ModelSettings(
+        "azure/o1-preview",
+        "diff",
+        weak_model_name="azure/gpt-4o-mini",
+        editor_model_name="azure/gpt-4o",
+        editor_edit_format="editor-diff",
+        use_repo_map=True,
+        reminder="user",
+        use_system_prompt=False,
+        use_temperature=False,
+        streaming=False,
+    ),
     ModelSettings(
         "o1-preview",
         "architect",
diff --git a/aider/website/_includes/multi-line.md b/aider/website/_includes/multi-line.md
index 9d37aadd3..757b93cec 100644
--- a/aider/website/_includes/multi-line.md
+++ b/aider/website/_includes/multi-line.md
@@ -2,4 +2,4 @@ You can send long, multi-line messages in the chat in a few ways:
   - Paste a multi-line message directly into the chat.
   - Enter `{` alone on the first line to start a multiline message and `}` alone on the last line to end it.
   - Use Meta-ENTER to start a new line without sending the message (Esc+ENTER in some environments).
-  - Use `/clipboard` to paste text from the clipboard into the chat.
+  - Use `/paste` to paste text from the clipboard into the chat.
diff --git a/aider/website/assets/sample.aider.conf.yml b/aider/website/assets/sample.aider.conf.yml
index 71e6b83f5..e0b8db804 100644
--- a/aider/website/assets/sample.aider.conf.yml
+++ b/aider/website/assets/sample.aider.conf.yml
@@ -104,7 +104,7 @@
 ## Only work with models that have meta-data available (default: True)
 #show-model-warnings: true
 
-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
 #max-chat-history-tokens: xxx
 
 ## Specify the .env file to load (default: .env in git root)
diff --git a/aider/website/assets/sample.env b/aider/website/assets/sample.env
index f96fd9964..2fce0d88a 100644
--- a/aider/website/assets/sample.env
+++ b/aider/website/assets/sample.env
@@ -108,7 +108,7 @@
 ## Only work with models that have meta-data available (default: True)
 #AIDER_SHOW_MODEL_WARNINGS=true
 
-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
 #AIDER_MAX_CHAT_HISTORY_TOKENS=
 
 ## Specify the .env file to load (default: .env in git root)
diff --git a/aider/website/docs/config/adv-model-settings.md b/aider/website/docs/config/adv-model-settings.md
index 95d19086c..37879b193 100644
--- a/aider/website/docs/config/adv-model-settings.md
+++ b/aider/website/docs/config/adv-model-settings.md
@@ -550,7 +550,7 @@ cog.out("```\n")
   use_repo_map: true
   use_system_prompt: true
   use_temperature: true
-  weak_model_name: openrouter/anthropic/claude-3-haiku-20240307
+  weak_model_name: openrouter/anthropic/claude-3-haiku
 - accepts_images: true
   cache_control: true
   caches_by_default: false
@@ -568,7 +568,7 @@ cog.out("```\n")
   use_repo_map: true
   use_system_prompt: true
   use_temperature: true
-  weak_model_name: openrouter/anthropic/claude-3-haiku-20240307
+  weak_model_name: openrouter/anthropic/claude-3-haiku:beta
 - accepts_images: true
   cache_control: false
   caches_by_default: false
diff --git a/aider/website/docs/config/aider_conf.md b/aider/website/docs/config/aider_conf.md
index be3bc3b1e..0092ac2f3 100644
--- a/aider/website/docs/config/aider_conf.md
+++ b/aider/website/docs/config/aider_conf.md
@@ -160,7 +160,7 @@ cog.outl("```")
 ## Only work with models that have meta-data available (default: True)
 #show-model-warnings: true
 
-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
 #max-chat-history-tokens: xxx
 
 ## Specify the .env file to load (default: .env in git root)
diff --git a/aider/website/docs/config/dotenv.md b/aider/website/docs/config/dotenv.md
index 0da62a6bf..892ae76c8 100644
--- a/aider/website/docs/config/dotenv.md
+++ b/aider/website/docs/config/dotenv.md
@@ -150,7 +150,7 @@ cog.outl("```")
 ## Only work with models that have meta-data available (default: True)
 #AIDER_SHOW_MODEL_WARNINGS=true
 
-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
 #AIDER_MAX_CHAT_HISTORY_TOKENS=
 
 ## Specify the .env file to load (default: .env in git root)
diff --git a/aider/website/docs/config/options.md b/aider/website/docs/config/options.md
index afa0b6c85..1b9ebfe59 100644
--- a/aider/website/docs/config/options.md
+++ b/aider/website/docs/config/options.md
@@ -223,7 +223,7 @@ Aliases:
   - `--no-show-model-warnings`
 
 ### `--max-chat-history-tokens VALUE`
-Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
 Environment variable: `AIDER_MAX_CHAT_HISTORY_TOKENS`
 
 ### `--env-file ENV_FILE`