From c3b9d34e24e96b3a86722fb85458d02cc1e2f5b1 Mon Sep 17 00:00:00 2001 From: Paul Gauthier Date: Thu, 26 Sep 2024 11:35:33 -0700 Subject: [PATCH] copy --- aider/website/assets/sample.aider.conf.yml | 15 +++ aider/website/assets/sample.env | 15 +++ .../website/docs/config/adv-model-settings.md | 108 +++++++++++++++++- aider/website/docs/config/aider_conf.md | 15 +++ aider/website/docs/config/dotenv.md | 15 +++ aider/website/docs/config/options.md | 27 ++++- aider/website/docs/leaderboards/index.md | 2 +- 7 files changed, 192 insertions(+), 5 deletions(-) diff --git a/aider/website/assets/sample.aider.conf.yml b/aider/website/assets/sample.aider.conf.yml index 5c3954326..f8316ad8a 100644 --- a/aider/website/assets/sample.aider.conf.yml +++ b/aider/website/assets/sample.aider.conf.yml @@ -50,6 +50,12 @@ ## Use deepseek/deepseek-coder model for the main chat #deepseek: false +## Use o1-mini model for the main chat +#o1-mini: false + +## Use o1-preview model for the main chat +#o1-preview: false + ################# # Model Settings: @@ -83,9 +89,18 @@ ## Specify what edit format the LLM should use (default depends on model) #edit-format: xxx +## Use senior edit format for the main chat +#senior: false + ## Specify the model to use for commit messages and chat history summarization (default depends on --model) #weak-model: xxx +## Specify the model to use for junior tasks (default depends on --model) +#junior-model: xxx + +## Specify the edit format for the junior model (default: depends on junior model) +#junior-edit-format: xxx + ## Only work with models that have meta-data available (default: True) #show-model-warnings: true diff --git a/aider/website/assets/sample.env b/aider/website/assets/sample.env index 963c64b2d..556dd8221 100644 --- a/aider/website/assets/sample.env +++ b/aider/website/assets/sample.env @@ -54,6 +54,12 @@ ## Use deepseek/deepseek-coder model for the main chat #AIDER_DEEPSEEK= +## Use o1-mini model for the main chat +#AIDER_O1_MINI= + +## Use o1-preview model for the main chat +#AIDER_O1_PREVIEW= + ################# # Model Settings: @@ -87,9 +93,18 @@ ## Specify what edit format the LLM should use (default depends on model) #AIDER_EDIT_FORMAT= +## Use senior edit format for the main chat +#AIDER_SENIOR= + ## Specify the model to use for commit messages and chat history summarization (default depends on --model) #AIDER_WEAK_MODEL= +## Specify the model to use for junior tasks (default depends on --model) +#AIDER_JUNIOR_MODEL= + +## Specify the edit format for the junior model (default: depends on junior model) +#AIDER_JUNIOR_EDIT_FORMAT= + ## Only work with models that have meta-data available (default: True) #AIDER_SHOW_MODEL_WARNINGS=true diff --git a/aider/website/docs/config/adv-model-settings.md b/aider/website/docs/config/adv-model-settings.md index a69569b75..b0c6c9015 100644 --- a/aider/website/docs/config/adv-model-settings.md +++ b/aider/website/docs/config/adv-model-settings.md @@ -88,6 +88,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-3.5-turbo @@ -105,6 +107,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-3.5-turbo-0125 @@ -122,6 +126,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null 
name: gpt-3.5-turbo-1106 @@ -139,6 +145,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-3.5-turbo-0613 @@ -156,6 +164,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-3.5-turbo-16k-0613 @@ -173,6 +183,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4-turbo-2024-04-09 @@ -190,6 +202,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4-turbo @@ -207,6 +221,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: null lazy: true max_tokens: null name: openai/gpt-4o @@ -224,6 +240,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: openai/gpt-4o-2024-08-06 @@ -241,6 +259,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4o-2024-08-06 @@ -258,6 +278,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: null lazy: true max_tokens: null name: gpt-4o @@ -275,6 +297,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4o-mini @@ -292,6 +316,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: openai/gpt-4o-mini @@ -309,6 +335,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4-0125-preview @@ -326,6 +354,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: true max_tokens: null name: gpt-4-1106-preview @@ -343,6 +373,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-4-vision-preview @@ -360,6 +392,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-4-0314 @@ -377,6 +411,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-4-0613 @@ -394,6 +430,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gpt-4-32k-0613 @@ -411,6 +449,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: claude-3-opus-20240229 @@ -428,6 +468,8 @@ 
cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: openrouter/anthropic/claude-3-opus @@ -445,6 +487,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: claude-3-sonnet-20240229 @@ -463,6 +507,8 @@ cog.out("```\n") extra_body: null extra_headers: anthropic-beta: prompt-caching-2024-07-31 + junior_edit_format: junior-diff + junior_model_name: claude-3-5-sonnet-20240620 lazy: false max_tokens: 8192 name: claude-3-5-sonnet-20240620 @@ -481,6 +527,8 @@ cog.out("```\n") extra_body: null extra_headers: anthropic-beta: prompt-caching-2024-07-31 + junior_edit_format: junior-diff + junior_model_name: anthropic/claude-3-5-sonnet-20240620 lazy: false max_tokens: 8192 name: anthropic/claude-3-5-sonnet-20240620 @@ -499,6 +547,8 @@ cog.out("```\n") extra_body: null extra_headers: anthropic-beta: prompt-caching-2024-07-31 + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: anthropic/claude-3-haiku-20240307 @@ -517,6 +567,8 @@ cog.out("```\n") extra_body: null extra_headers: anthropic-beta: prompt-caching-2024-07-31 + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: claude-3-haiku-20240307 @@ -534,6 +586,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: openrouter/anthropic/claude-3.5-sonnet lazy: false max_tokens: 8192 name: openrouter/anthropic/claude-3.5-sonnet @@ -551,6 +605,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: vertex_ai/claude-3-5-sonnet@20240620 lazy: false max_tokens: 8192 name: vertex_ai/claude-3-5-sonnet@20240620 @@ -568,6 +624,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: vertex_ai/claude-3-opus@20240229 @@ -585,6 +643,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: vertex_ai/claude-3-sonnet@20240229 @@ -602,6 +662,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: command-r-plus @@ -619,6 +681,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: command-r-08-2024 @@ -636,6 +700,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: command-r-plus-08-2024 @@ -653,6 +719,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: groq/llama3-70b-8192 @@ -670,6 +738,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: openrouter/meta-llama/llama-3-70b-instruct @@ -687,6 +757,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + 
junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-pro-002 @@ -704,6 +776,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-flash-002 @@ -721,6 +795,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-pro @@ -738,6 +814,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-pro-latest @@ -755,6 +833,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-pro-exp-0827 @@ -772,6 +852,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: gemini/gemini-1.5-flash-exp-0827 @@ -789,6 +871,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: 8192 name: deepseek/deepseek-chat @@ -806,6 +890,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: 8192 name: deepseek/deepseek-coder @@ -823,6 +909,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: 8192 name: deepseek-chat @@ -840,6 +928,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: 8192 name: deepseek-coder @@ -857,6 +947,8 @@ cog.out("```\n") examples_as_sys_msg: true extra_body: null extra_headers: null + junior_edit_format: null + junior_model_name: null lazy: false max_tokens: null name: openrouter/deepseek/deepseek-coder @@ -874,6 +966,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: null lazy: true max_tokens: null name: openrouter/openai/gpt-4o @@ -891,6 +985,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: openai/gpt-4o lazy: false max_tokens: null name: openai/o1-mini @@ -908,6 +1004,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: gpt-4o lazy: false max_tokens: null name: o1-mini @@ -925,6 +1023,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: openai/gpt-4o lazy: false max_tokens: null name: openai/o1-preview @@ -938,10 +1038,12 @@ cog.out("```\n") - accepts_images: false cache_control: false caches_by_default: false - edit_format: diff + edit_format: senior examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: gpt-4o lazy: false max_tokens: null name: o1-preview @@ -959,6 +1061,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + 
junior_edit_format: junior-diff + junior_model_name: openrouter/openai/gpt-4o lazy: false max_tokens: null name: openrouter/openai/o1-mini @@ -976,6 +1080,8 @@ cog.out("```\n") examples_as_sys_msg: false extra_body: null extra_headers: null + junior_edit_format: junior-diff + junior_model_name: openrouter/openai/gpt-4o lazy: false max_tokens: null name: openrouter/openai/o1-preview diff --git a/aider/website/docs/config/aider_conf.md b/aider/website/docs/config/aider_conf.md index c4693bd3f..f67fefdf8 100644 --- a/aider/website/docs/config/aider_conf.md +++ b/aider/website/docs/config/aider_conf.md @@ -98,6 +98,12 @@ cog.outl("```") ## Use deepseek/deepseek-coder model for the main chat #deepseek: false +## Use o1-mini model for the main chat +#o1-mini: false + +## Use o1-preview model for the main chat +#o1-preview: false + ################# # Model Settings: @@ -131,9 +137,18 @@ cog.outl("```") ## Specify what edit format the LLM should use (default depends on model) #edit-format: xxx +## Use senior edit format for the main chat +#senior: false + ## Specify the model to use for commit messages and chat history summarization (default depends on --model) #weak-model: xxx +## Specify the model to use for junior tasks (default depends on --model) +#junior-model: xxx + +## Specify the edit format for the junior model (default: depends on junior model) +#junior-edit-format: xxx + ## Only work with models that have meta-data available (default: True) #show-model-warnings: true diff --git a/aider/website/docs/config/dotenv.md b/aider/website/docs/config/dotenv.md index d3120540e..716ab707c 100644 --- a/aider/website/docs/config/dotenv.md +++ b/aider/website/docs/config/dotenv.md @@ -96,6 +96,12 @@ cog.outl("```") ## Use deepseek/deepseek-coder model for the main chat #AIDER_DEEPSEEK= +## Use o1-mini model for the main chat +#AIDER_O1_MINI= + +## Use o1-preview model for the main chat +#AIDER_O1_PREVIEW= + ################# # Model Settings: @@ -129,9 +135,18 @@ cog.outl("```") ## Specify what edit format the LLM should use (default depends on model) #AIDER_EDIT_FORMAT= +## Use senior edit format for the main chat +#AIDER_SENIOR= + ## Specify the model to use for commit messages and chat history summarization (default depends on --model) #AIDER_WEAK_MODEL= +## Specify the model to use for junior tasks (default depends on --model) +#AIDER_JUNIOR_MODEL= + +## Specify the edit format for the junior model (default: depends on junior model) +#AIDER_JUNIOR_EDIT_FORMAT= + ## Only work with models that have meta-data available (default: True) #AIDER_SHOW_MODEL_WARNINGS=true diff --git a/aider/website/docs/config/options.md b/aider/website/docs/config/options.md index e14450d95..92bcd5c24 100644 --- a/aider/website/docs/config/options.md +++ b/aider/website/docs/config/options.md @@ -27,13 +27,14 @@ cog.out(get_md_help()) ``` usage: aider [-h] [--openai-api-key] [--anthropic-api-key] [--model] [--opus] [--sonnet] [--4] [--4o] [--mini] [--4-turbo] - [--35turbo] [--deepseek] [--list-models] - [--openai-api-base] [--openai-api-type] + [--35turbo] [--deepseek] [--o1-mini] [--o1-preview] + [--list-models] [--openai-api-base] [--openai-api-type] [--openai-api-version] [--openai-api-deployment-id] [--openai-organization-id] [--model-settings-file] [--model-metadata-file] [--verify-ssl | --no-verify-ssl] [--edit-format] - [--weak-model] + [--senior] [--weak-model] [--junior-model] + [--junior-edit-format] [--show-model-warnings | --no-show-model-warnings] [--max-chat-history-tokens] [--env-file] [--cache-prompts 
| --no-cache-prompts] @@ -133,6 +134,14 @@ Aliases: Use deepseek/deepseek-coder model for the main chat Environment variable: `AIDER_DEEPSEEK` +### `--o1-mini` +Use o1-mini model for the main chat +Environment variable: `AIDER_O1_MINI` + +### `--o1-preview` +Use o1-preview model for the main chat +Environment variable: `AIDER_O1_PREVIEW` + ## Model Settings: ### `--list-models MODEL` @@ -187,10 +196,22 @@ Aliases: - `--edit-format EDIT_FORMAT` - `--chat-mode EDIT_FORMAT` +### `--senior` +Use senior edit format for the main chat +Environment variable: `AIDER_SENIOR` + ### `--weak-model WEAK_MODEL` Specify the model to use for commit messages and chat history summarization (default depends on --model) Environment variable: `AIDER_WEAK_MODEL` +### `--junior-model JUNIOR_MODEL` +Specify the model to use for junior tasks (default depends on --model) +Environment variable: `AIDER_JUNIOR_MODEL` + +### `--junior-edit-format JUNIOR_EDIT_FORMAT` +Specify the edit format for the junior model (default: depends on junior model) +Environment variable: `AIDER_JUNIOR_EDIT_FORMAT` + ### `--show-model-warnings` Only work with models that have meta-data available (default: True) Default: True diff --git a/aider/website/docs/leaderboards/index.md b/aider/website/docs/leaderboards/index.md index 94d9faf2f..62b9f6abc 100644 --- a/aider/website/docs/leaderboards/index.md +++ b/aider/website/docs/leaderboards/index.md @@ -321,6 +321,6 @@ mod_dates = [get_last_modified_date(file) for file in files] latest_mod_date = max(mod_dates) cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}") ]]]--> -September 24, 2024. +September 26, 2024.
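
The options this patch documents (`--senior`, `--junior-model`, `--junior-edit-format`, plus the `--o1-mini`/`--o1-preview` shortcuts) can also be set in `.aider.conf.yml`, per the sample config above. A minimal sketch follows; the o1-preview / gpt-4o pairing simply spells out the defaults shown in the model settings dump (`junior_model_name: gpt-4o`, `junior_edit_format: junior-diff`), so treat it as illustrative rather than required:

```yaml
# Illustrative .aider.conf.yml fragment using the options documented above.
# The pairing mirrors the o1-preview defaults from the settings dump; any
# main/junior combination can be written out the same way.
model: o1-preview               # main ("senior") model for the chat
senior: true                    # use the senior edit format for the main chat
junior-model: gpt-4o            # model used for junior tasks (applying the edits)
junior-edit-format: junior-diff # edit format the junior model should use
```

The same settings are available as environment variables in a `.env` file (`AIDER_SENIOR`, `AIDER_JUNIOR_MODEL`, `AIDER_JUNIOR_EDIT_FORMAT`) or as the command-line flags listed in `options.md`.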
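
The new `junior_edit_format` and `junior_model_name` fields also appear in the per-model settings shown in `adv-model-settings.md`, so they can be overridden in a model settings YAML file (a list of entries like the dump above). A sketch under that assumption; the model name below is a hypothetical placeholder, not one from the patch:

```yaml
# Illustrative model settings entry overriding the new junior fields.
# Fields not shown follow the same schema as the dump above.
- name: openrouter/some-provider/some-model   # hypothetical model identifier
  edit_format: senior             # main model describes the change
  junior_model_name: gpt-4o       # junior model turns it into file edits
  junior_edit_format: junior-diff # format those edits are expressed in
```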