Commit a1bd0c97ee (parent 6a733f8e76): 2 changed files with 837 additions and 25 deletions.

@@ -66,31 +66,842 @@ create a `.aider.model.settings.yml` file in one of these locations:

If the files above exist, they will be loaded in that order.
Files loaded last will take priority.

The yaml file should be a list of dictionary objects for each model, as follows:

```
- name: "gpt-3.5-turbo"
  edit_format: "whole"
  weak_model_name: "gpt-3.5-turbo"
  use_repo_map: false
  send_undo_reply: false
  accepts_images: false
  lazy: false
  reminder: sys
  examples_as_sys_msg: false
- name: "gpt-4-turbo-2024-04-09"
  edit_format: "udiff"
  weak_model_name: "gpt-3.5-turbo"
  use_repo_map: true
  send_undo_reply: true
  accepts_images: true
  lazy: true
  reminder: sys
  examples_as_sys_msg: false
```

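For instance, a hypothetical `.aider.model.settings.yml` that changes just a couple of fields for one model might look like the sketch below. The model name and values are purely illustrative; in practice it is safest to copy a complete entry from the listing further down and edit it, since fields you leave out may fall back to aider's generic defaults rather than that model's usual settings.

```yaml
# Hypothetical override -- values chosen only for illustration.
- name: "gpt-4o"
  edit_format: "whole"            # ask for whole-file edits instead of diffs
  use_repo_map: true
  weak_model_name: "gpt-4o-mini"
```
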
Below are all of the pre-configured model settings, to give a sense for the settings which are supported.

You can also look at the `ModelSettings` class in
[models.py](https://github.com/paul-gauthier/aider/blob/main/aider/models.py)
for more details about all of the model settings that aider supports.
That file also contains the settings for many popular models.

<!--[[[cog
from aider.models import get_model_settings_as_yaml
cog.out("```yaml\n")
cog.out(get_model_settings_as_yaml())
cog.out("```\n")
]]]-->
```yaml
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-3.5-turbo
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-3.5-turbo-0125
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-3.5-turbo-1106
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-3.5-turbo-0613
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-3.5-turbo-16k-0613
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: udiff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4-turbo-2024-04-09
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: udiff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4-turbo
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: openai/gpt-4o
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: openai/gpt-4o-2024-08-06
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4o-2024-08-06
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4o
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4o-mini
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: openai/gpt-4o-mini
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: openai/gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: udiff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4-0125-preview
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: udiff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: gpt-4-1106-preview
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-4-vision-preview
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-4-0314
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-4-0613
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gpt-4-32k-0613
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: claude-3-opus-20240229
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: claude-3-haiku-20240307
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openrouter/anthropic/claude-3-opus
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: openrouter/anthropic/claude-3-haiku
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: claude-3-sonnet-20240229
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: claude-3-haiku-20240307
- accepts_images: true
  cache_control: true
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers:
    anthropic-beta: prompt-caching-2024-07-31
  lazy: false
  max_tokens: 8192
  name: claude-3-5-sonnet-20240620
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: claude-3-haiku-20240307
- accepts_images: false
  cache_control: true
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers:
    anthropic-beta: prompt-caching-2024-07-31
  lazy: false
  max_tokens: 8192
  name: anthropic/claude-3-5-sonnet-20240620
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: claude-3-haiku-20240307
- accepts_images: false
  cache_control: true
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: true
  extra_headers:
    anthropic-beta: prompt-caching-2024-07-31
  lazy: false
  max_tokens: null
  name: anthropic/claude-3-haiku-20240307
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: anthropic/claude-3-haiku-20240307
- accepts_images: false
  cache_control: true
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: true
  extra_headers:
    anthropic-beta: prompt-caching-2024-07-31
  lazy: false
  max_tokens: null
  name: claude-3-haiku-20240307
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: claude-3-haiku-20240307
- accepts_images: true
  cache_control: true
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: openrouter/anthropic/claude-3.5-sonnet
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: openrouter/anthropic/claude-3-haiku-20240307
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: vertex_ai/claude-3-5-sonnet@20240620
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: vertex_ai/claude-3-haiku@20240307
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: vertex_ai/claude-3-opus@20240229
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: vertex_ai/claude-3-haiku@20240307
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: vertex_ai/claude-3-sonnet@20240229
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: vertex_ai/claude-3-haiku@20240307
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: command-r-plus
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: command-r-plus
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: command-r-08-2024
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: command-r-08-2024
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: command-r-plus-08-2024
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: command-r-plus-08-2024
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: null
  name: groq/llama3-70b-8192
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: groq/llama3-8b-8192
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openrouter/meta-llama/llama-3-70b-instruct
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: openrouter/meta-llama/llama-3-70b-instruct
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff-fenced
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gemini/gemini-1.5-pro
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff-fenced
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gemini/gemini-1.5-pro-latest
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff-fenced
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gemini/gemini-1.5-pro-exp-0827
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: gemini/gemini-1.5-flash-exp-0827
  reminder: user
  send_undo_reply: false
  streaming: true
  use_repo_map: false
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: deepseek/deepseek-chat
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: true
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: deepseek/deepseek-coder
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: deepseek-chat
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: true
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: 8192
  name: deepseek-coder
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: true
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openrouter/deepseek/deepseek-coder
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: null
- accepts_images: true
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: true
  max_tokens: null
  name: openrouter/openai/gpt-4o
  reminder: sys
  send_undo_reply: false
  streaming: true
  use_repo_map: true
  use_system_prompt: true
  use_temperature: true
  weak_model_name: openrouter/openai/gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openai/o1-mini
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: openai/gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: o1-mini
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openai/o1-preview
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: openai/gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: o1-preview
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: whole
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openrouter/openai/o1-mini
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: openrouter/openai/gpt-4o-mini
- accepts_images: false
  cache_control: false
  caches_by_default: false
  edit_format: diff
  examples_as_sys_msg: false
  extra_headers: null
  lazy: false
  max_tokens: null
  name: openrouter/openai/o1-preview
  reminder: user
  send_undo_reply: false
  streaming: false
  use_repo_map: true
  use_system_prompt: false
  use_temperature: false
  weak_model_name: openrouter/openai/gpt-4o-mini
```
<!--[[[end]]]-->
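
The listing above only covers models that ship with pre-configured settings. The same keys can also be used to supply settings for a model aider does not already know about; the sketch below is a hypothetical entry for an OpenAI-compatible endpoint, where the model name and every value are assumptions rather than tested settings.

```yaml
# Hypothetical entry for a model without built-in settings.
- name: "openai/my-local-model"
  edit_format: "whole"
  use_repo_map: false
  use_temperature: true
  streaming: true
  weak_model_name: "openai/my-local-model"
```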

@@ -19,5 +19,6 @@ cog $ARG \
aider/website/docs/config/dotenv.md \
aider/website/docs/config/options.md \
aider/website/docs/config/aider_conf.md \
aider/website/docs/config/adv-model-settings.md \
aider/website/docs/leaderboards/index.md \
aider/website/docs/llms/other.md