This commit is contained in:
Paul Gauthier 2024-09-26 11:35:33 -07:00
parent 5a78e7d1b8
commit c3b9d34e24
7 changed files with 192 additions and 5 deletions

View file

@@ -50,6 +50,12 @@
## Use deepseek/deepseek-coder model for the main chat
#deepseek: false
## Use o1-mini model for the main chat
#o1-mini: false
## Use o1-preview model for the main chat
#o1-preview: false
#################
# Model Settings:
@@ -83,9 +89,18 @@
## Specify what edit format the LLM should use (default depends on model)
#edit-format: xxx
## Use senior edit format for the main chat
#senior: false
## Specify the model to use for commit messages and chat history summarization (default depends on --model)
#weak-model: xxx
## Specify the model to use for junior tasks (default depends on --model)
#junior-model: xxx
## Specify the edit format for the junior model (default: depends on junior model)
#junior-edit-format: xxx
## Only work with models that have meta-data available (default: True)
#show-model-warnings: true

View file

@@ -54,6 +54,12 @@
## Use deepseek/deepseek-coder model for the main chat
#AIDER_DEEPSEEK=
## Use o1-mini model for the main chat
#AIDER_O1_MINI=
## Use o1-preview model for the main chat
#AIDER_O1_PREVIEW=
#################
# Model Settings:
@@ -87,9 +93,18 @@
## Specify what edit format the LLM should use (default depends on model)
#AIDER_EDIT_FORMAT=
## Use senior edit format for the main chat
#AIDER_SENIOR=
## Specify the model to use for commit messages and chat history summarization (default depends on --model)
#AIDER_WEAK_MODEL=
## Specify the model to use for junior tasks (default depends on --model)
#AIDER_JUNIOR_MODEL=
## Specify the edit format for the junior model (default: depends on junior model)
#AIDER_JUNIOR_EDIT_FORMAT=
## Only work with models that have meta-data available (default: True)
#AIDER_SHOW_MODEL_WARNINGS=true

View file

@@ -88,6 +88,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-3.5-turbo
@@ -105,6 +107,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-3.5-turbo-0125
@@ -122,6 +126,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-3.5-turbo-1106
@@ -139,6 +145,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-3.5-turbo-0613
@@ -156,6 +164,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-3.5-turbo-16k-0613
@@ -173,6 +183,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4-turbo-2024-04-09
@@ -190,6 +202,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4-turbo
@@ -207,6 +221,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: null
lazy: true
max_tokens: null
name: openai/gpt-4o
@@ -224,6 +240,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: openai/gpt-4o-2024-08-06
@@ -241,6 +259,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4o-2024-08-06
@@ -258,6 +278,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4o
@@ -275,6 +297,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4o-mini
@@ -292,6 +316,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: openai/gpt-4o-mini
@@ -309,6 +335,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4-0125-preview
@@ -326,6 +354,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: true
max_tokens: null
name: gpt-4-1106-preview
@@ -343,6 +373,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-4-vision-preview
@@ -360,6 +392,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-4-0314
@@ -377,6 +411,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-4-0613
@@ -394,6 +430,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gpt-4-32k-0613
@@ -411,6 +449,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: claude-3-opus-20240229
@@ -428,6 +468,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: openrouter/anthropic/claude-3-opus
@@ -445,6 +487,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: claude-3-sonnet-20240229
@@ -463,6 +507,8 @@ cog.out("```\n")
extra_body: null
extra_headers:
anthropic-beta: prompt-caching-2024-07-31
junior_edit_format: junior-diff
junior_model_name: claude-3-5-sonnet-20240620
lazy: false
max_tokens: 8192
name: claude-3-5-sonnet-20240620
@@ -481,6 +527,8 @@ cog.out("```\n")
extra_body: null
extra_headers:
anthropic-beta: prompt-caching-2024-07-31
junior_edit_format: junior-diff
junior_model_name: anthropic/claude-3-5-sonnet-20240620
lazy: false
max_tokens: 8192
name: anthropic/claude-3-5-sonnet-20240620
@@ -499,6 +547,8 @@ cog.out("```\n")
extra_body: null
extra_headers:
anthropic-beta: prompt-caching-2024-07-31
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: anthropic/claude-3-haiku-20240307
@@ -517,6 +567,8 @@ cog.out("```\n")
extra_body: null
extra_headers:
anthropic-beta: prompt-caching-2024-07-31
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: claude-3-haiku-20240307
@@ -534,6 +586,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: openrouter/anthropic/claude-3.5-sonnet
lazy: false
max_tokens: 8192
name: openrouter/anthropic/claude-3.5-sonnet
@@ -551,6 +605,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: vertex_ai/claude-3-5-sonnet@20240620
lazy: false
max_tokens: 8192
name: vertex_ai/claude-3-5-sonnet@20240620
@@ -568,6 +624,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: vertex_ai/claude-3-opus@20240229
@@ -585,6 +643,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: vertex_ai/claude-3-sonnet@20240229
@@ -602,6 +662,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: command-r-plus
@@ -619,6 +681,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: command-r-08-2024
@@ -636,6 +700,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: command-r-plus-08-2024
@@ -653,6 +719,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: groq/llama3-70b-8192
@@ -670,6 +738,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: openrouter/meta-llama/llama-3-70b-instruct
@@ -687,6 +757,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-pro-002
@@ -704,6 +776,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-flash-002
@@ -721,6 +795,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-pro
@@ -738,6 +814,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-pro-latest
@@ -755,6 +833,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-pro-exp-0827
@@ -772,6 +852,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: gemini/gemini-1.5-flash-exp-0827
@@ -789,6 +871,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: 8192
name: deepseek/deepseek-chat
@@ -806,6 +890,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: 8192
name: deepseek/deepseek-coder
@@ -823,6 +909,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: 8192
name: deepseek-chat
@@ -840,6 +928,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: 8192
name: deepseek-coder
@@ -857,6 +947,8 @@ cog.out("```\n")
examples_as_sys_msg: true
extra_body: null
extra_headers: null
junior_edit_format: null
junior_model_name: null
lazy: false
max_tokens: null
name: openrouter/deepseek/deepseek-coder
@@ -874,6 +966,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: null
lazy: true
max_tokens: null
name: openrouter/openai/gpt-4o
@@ -891,6 +985,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: openai/gpt-4o
lazy: false
max_tokens: null
name: openai/o1-mini
@@ -908,6 +1004,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: gpt-4o
lazy: false
max_tokens: null
name: o1-mini
@@ -925,6 +1023,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: openai/gpt-4o
lazy: false
max_tokens: null
name: openai/o1-preview
@@ -938,10 +1038,12 @@ cog.out("```\n")
- accepts_images: false
cache_control: false
caches_by_default: false
edit_format: diff
edit_format: senior
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: gpt-4o
lazy: false
max_tokens: null
name: o1-preview
@@ -959,6 +1061,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: openrouter/openai/gpt-4o
lazy: false
max_tokens: null
name: openrouter/openai/o1-mini
@@ -976,6 +1080,8 @@ cog.out("```\n")
examples_as_sys_msg: false
extra_body: null
extra_headers: null
junior_edit_format: junior-diff
junior_model_name: openrouter/openai/gpt-4o
lazy: false
max_tokens: null
name: openrouter/openai/o1-preview

View file

@@ -98,6 +98,12 @@ cog.outl("```")
## Use deepseek/deepseek-coder model for the main chat
#deepseek: false
## Use o1-mini model for the main chat
#o1-mini: false
## Use o1-preview model for the main chat
#o1-preview: false
#################
# Model Settings:
@@ -131,9 +137,18 @@ cog.outl("```")
## Specify what edit format the LLM should use (default depends on model)
#edit-format: xxx
## Use senior edit format for the main chat
#senior: false
## Specify the model to use for commit messages and chat history summarization (default depends on --model)
#weak-model: xxx
## Specify the model to use for junior tasks (default depends on --model)
#junior-model: xxx
## Specify the edit format for the junior model (default: depends on junior model)
#junior-edit-format: xxx
## Only work with models that have meta-data available (default: True)
#show-model-warnings: true

View file

@@ -96,6 +96,12 @@ cog.outl("```")
## Use deepseek/deepseek-coder model for the main chat
#AIDER_DEEPSEEK=
## Use o1-mini model for the main chat
#AIDER_O1_MINI=
## Use o1-preview model for the main chat
#AIDER_O1_PREVIEW=
#################
# Model Settings:
@@ -129,9 +135,18 @@ cog.outl("```")
## Specify what edit format the LLM should use (default depends on model)
#AIDER_EDIT_FORMAT=
## Use senior edit format for the main chat
#AIDER_SENIOR=
## Specify the model to use for commit messages and chat history summarization (default depends on --model)
#AIDER_WEAK_MODEL=
## Specify the model to use for junior tasks (default depends on --model)
#AIDER_JUNIOR_MODEL=
## Specify the edit format for the junior model (default: depends on junior model)
#AIDER_JUNIOR_EDIT_FORMAT=
## Only work with models that have meta-data available (default: True)
#AIDER_SHOW_MODEL_WARNINGS=true

View file

@@ -27,13 +27,14 @@ cog.out(get_md_help())
```
usage: aider [-h] [--openai-api-key] [--anthropic-api-key] [--model]
[--opus] [--sonnet] [--4] [--4o] [--mini] [--4-turbo]
[--35turbo] [--deepseek] [--list-models]
[--openai-api-base] [--openai-api-type]
[--35turbo] [--deepseek] [--o1-mini] [--o1-preview]
[--list-models] [--openai-api-base] [--openai-api-type]
[--openai-api-version] [--openai-api-deployment-id]
[--openai-organization-id] [--model-settings-file]
[--model-metadata-file]
[--verify-ssl | --no-verify-ssl] [--edit-format]
[--weak-model]
[--senior] [--weak-model] [--junior-model]
[--junior-edit-format]
[--show-model-warnings | --no-show-model-warnings]
[--max-chat-history-tokens] [--env-file]
[--cache-prompts | --no-cache-prompts]
@@ -133,6 +134,14 @@ Aliases:
Use deepseek/deepseek-coder model for the main chat
Environment variable: `AIDER_DEEPSEEK`
### `--o1-mini`
Use o1-mini model for the main chat
Environment variable: `AIDER_O1_MINI`
### `--o1-preview`
Use o1-preview model for the main chat
Environment variable: `AIDER_O1_PREVIEW`
## Model Settings:
### `--list-models MODEL`
@@ -187,10 +196,22 @@ Aliases:
- `--edit-format EDIT_FORMAT`
- `--chat-mode EDIT_FORMAT`
### `--senior`
Use senior edit format for the main chat
Environment variable: `AIDER_SENIOR`
### `--weak-model WEAK_MODEL`
Specify the model to use for commit messages and chat history summarization (default depends on --model)
Environment variable: `AIDER_WEAK_MODEL`
### `--junior-model JUNIOR_MODEL`
Specify the model to use for junior tasks (default depends on --model)
Environment variable: `AIDER_JUNIOR_MODEL`
### `--junior-edit-format JUNIOR_EDIT_FORMAT`
Specify the edit format for the junior model (default: depends on junior model)
Environment variable: `AIDER_JUNIOR_EDIT_FORMAT`
### `--show-model-warnings`
Only work with models that have meta-data available (default: True)
Default: True

View file

@@ -321,6 +321,6 @@ mod_dates = [get_last_modified_date(file) for file in files]
latest_mod_date = max(mod_dates)
cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}")
]]]-->
September 24, 2024.
September 26, 2024.
<!--[[[end]]]-->
</p>