Mirror of https://github.com/Aider-AI/aider.git (synced 2025-06-12 07:35:00 +00:00)

Commit: eb21cf2830 ("architect/editor")
Parent: b551e29de3
23 changed files with 337 additions and 337 deletions
@@ -55,7 +55,7 @@ about prompting GPT for complex tasks like coding. It's beneficial to
 minimize the "cognitive overhead" of formatting the response, allowing
 GPT to concentrate on the coding task at hand.
 
-As a thought experiment, imagine a slack conversation with a junior developer where
+As a thought experiment, imagine a slack conversation with a editor developer where
 you ask them to write the code to add some new feature to your app.
 They're going to type the response back to you by hand in the chat.
 Should they type out the
@@ -85,11 +85,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-3.5-turbo

@@ -104,11 +104,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-3.5-turbo-0125

@@ -123,11 +123,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-3.5-turbo-1106

@@ -142,11 +142,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-3.5-turbo-0613

@@ -161,11 +161,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-3.5-turbo-16k-0613

@@ -180,11 +180,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: udiff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4-turbo-2024-04-09
@@ -199,11 +199,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: udiff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4-turbo

@@ -218,11 +218,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: openai/gpt-4o

@@ -237,11 +237,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: openai/gpt-4o-2024-08-06

@@ -256,11 +256,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4o-2024-08-06

@@ -275,11 +275,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4o

@@ -294,11 +294,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4o-mini

@@ -313,11 +313,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: openai/gpt-4o-mini
@@ -332,11 +332,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: udiff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4-0125-preview

@@ -351,11 +351,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: udiff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: gpt-4-1106-preview

@@ -370,11 +370,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-4-vision-preview

@@ -389,11 +389,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-4-0314

@@ -408,11 +408,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-4-0613

@@ -427,11 +427,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gpt-4-32k-0613

@@ -446,11 +446,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: claude-3-opus-20240229
@@ -465,11 +465,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: openrouter/anthropic/claude-3-opus

@@ -484,11 +484,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: claude-3-sonnet-20240229

@@ -503,12 +503,12 @@ cog.out("```\n")
   cache_control: true
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: claude-3-5-sonnet-20240620
   examples_as_sys_msg: true
   extra_body: null
   extra_headers:
     anthropic-beta: prompt-caching-2024-07-31
-  junior_edit_format: junior-diff
-  junior_model_name: claude-3-5-sonnet-20240620
   lazy: false
   max_tokens: 8192
   name: claude-3-5-sonnet-20240620

@@ -523,12 +523,12 @@ cog.out("```\n")
   cache_control: true
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: anthropic/claude-3-5-sonnet-20240620
   examples_as_sys_msg: true
   extra_body: null
   extra_headers:
     anthropic-beta: prompt-caching-2024-07-31
-  junior_edit_format: junior-diff
-  junior_model_name: anthropic/claude-3-5-sonnet-20240620
   lazy: false
   max_tokens: 8192
   name: anthropic/claude-3-5-sonnet-20240620

@@ -543,12 +543,12 @@ cog.out("```\n")
   cache_control: true
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers:
     anthropic-beta: prompt-caching-2024-07-31
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: anthropic/claude-3-haiku-20240307

@@ -563,12 +563,12 @@ cog.out("```\n")
   cache_control: true
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers:
     anthropic-beta: prompt-caching-2024-07-31
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: claude-3-haiku-20240307

@@ -583,11 +583,11 @@ cog.out("```\n")
   cache_control: true
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: openrouter/anthropic/claude-3.5-sonnet
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: openrouter/anthropic/claude-3.5-sonnet
   lazy: false
   max_tokens: 8192
   name: openrouter/anthropic/claude-3.5-sonnet
@@ -602,11 +602,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: vertex_ai/claude-3-5-sonnet@20240620
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: vertex_ai/claude-3-5-sonnet@20240620
   lazy: false
   max_tokens: 8192
   name: vertex_ai/claude-3-5-sonnet@20240620

@@ -621,11 +621,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: vertex_ai/claude-3-opus@20240229

@@ -640,11 +640,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: vertex_ai/claude-3-sonnet@20240229

@@ -659,11 +659,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: command-r-plus

@@ -678,11 +678,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: command-r-08-2024

@@ -697,11 +697,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: command-r-plus-08-2024

@@ -716,11 +716,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: groq/llama3-70b-8192
@@ -735,11 +735,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: openrouter/meta-llama/llama-3-70b-instruct

@@ -754,11 +754,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-pro-002

@@ -773,11 +773,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-flash-002

@@ -792,11 +792,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff-fenced
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-pro

@@ -811,11 +811,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff-fenced
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-pro-latest

@@ -830,11 +830,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff-fenced
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-pro-exp-0827

@@ -849,11 +849,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: gemini/gemini-1.5-flash-exp-0827
@@ -868,11 +868,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: 8192
   name: deepseek/deepseek-chat

@@ -887,11 +887,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: true
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: 8192
   name: deepseek/deepseek-coder

@@ -906,11 +906,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: 8192
   name: deepseek-chat

@@ -925,11 +925,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: true
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: 8192
   name: deepseek-coder

@@ -944,11 +944,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: null
+  editor_model_name: null
   examples_as_sys_msg: true
   extra_body: null
   extra_headers: null
-  junior_edit_format: null
-  junior_model_name: null
   lazy: false
   max_tokens: null
   name: openrouter/deepseek/deepseek-coder

@@ -963,11 +963,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: null
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: null
   lazy: true
   max_tokens: null
   name: openrouter/openai/gpt-4o

@@ -982,11 +982,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: editor-diff
+  editor_model_name: openai/gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: openai/gpt-4o
   lazy: false
   max_tokens: null
   name: openai/o1-mini
@@ -1001,11 +1001,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: editor-diff
+  editor_model_name: gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: gpt-4o
   lazy: false
   max_tokens: null
   name: o1-mini

@@ -1020,11 +1020,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: openai/gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: openai/gpt-4o
   lazy: false
   max_tokens: null
   name: openai/o1-preview

@@ -1038,12 +1038,12 @@ cog.out("```\n")
 - accepts_images: false
   cache_control: false
   caches_by_default: false
-  edit_format: senior
+  edit_format: architect
+  editor_edit_format: editor-diff
+  editor_model_name: gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: gpt-4o
   lazy: false
   max_tokens: null
   name: o1-preview

@@ -1058,11 +1058,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: whole
+  editor_edit_format: editor-diff
+  editor_model_name: openrouter/openai/gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: openrouter/openai/gpt-4o
   lazy: false
   max_tokens: null
   name: openrouter/openai/o1-mini

@@ -1077,11 +1077,11 @@ cog.out("```\n")
   cache_control: false
   caches_by_default: false
   edit_format: diff
+  editor_edit_format: editor-diff
+  editor_model_name: openrouter/openai/gpt-4o
   examples_as_sys_msg: false
   extra_body: null
   extra_headers: null
-  junior_edit_format: junior-diff
-  junior_model_name: openrouter/openai/gpt-4o
   lazy: false
   max_tokens: null
   name: openrouter/openai/o1-preview
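The hunks above all touch aider's generated table of per-model defaults: the `junior_edit_format` and `junior_model_name` fields become `editor_edit_format` and `editor_model_name`, and the `senior` edit format becomes `architect`. As a rough sketch only (the local override file and its exact schema are assumptions drawn from aider's model-settings documentation, not part of this diff), an entry in a `.aider.model.settings.yml` written with the renamed fields might look like this, reusing the o1-preview values shown above:

```yaml
# Hypothetical local override entry using the renamed fields.
# The file name and override mechanism are assumptions, not part of this commit.
- name: o1-preview
  edit_format: architect           # main chat proposes changes in the architect format
  editor_model_name: gpt-4o        # a second model applies the proposed edits
  editor_edit_format: editor-diff  # edit format used by that editor model
```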
@@ -137,17 +137,17 @@ cog.outl("```")
 ## Specify what edit format the LLM should use (default depends on model)
 #edit-format: xxx
 
-## Use senior edit format for the main chat
-#senior: false
+## Use architect edit format for the main chat
+#architect: false
 
 ## Specify the model to use for commit messages and chat history summarization (default depends on --model)
 #weak-model: xxx
 
-## Specify the model to use for junior tasks (default depends on --model)
-#junior-model: xxx
+## Specify the model to use for editor tasks (default depends on --model)
+#editor-model: xxx
 
-## Specify the edit format for the junior model (default: depends on junior model)
-#junior-edit-format: xxx
+## Specify the edit format for the editor model (default: depends on editor model)
+#editor-edit-format: xxx
 
 ## Only work with models that have meta-data available (default: True)
 #show-model-warnings: true
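For orientation, a minimal `.aider.conf.yml` fragment using the renamed options might read as follows; the values are illustrative placeholders rather than defaults taken from this commit:

```yaml
# Sketch of the renamed options in .aider.conf.yml (illustrative values only).
architect: true                  # was: senior
editor-model: gpt-4o             # was: junior-model
editor-edit-format: editor-diff  # was: junior-edit-format
```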
@@ -135,17 +135,17 @@ cog.outl("```")
 ## Specify what edit format the LLM should use (default depends on model)
 #AIDER_EDIT_FORMAT=
 
-## Use senior edit format for the main chat
-#AIDER_SENIOR=
+## Use architect edit format for the main chat
+#AIDER_ARCHITECT=
 
 ## Specify the model to use for commit messages and chat history summarization (default depends on --model)
 #AIDER_WEAK_MODEL=
 
-## Specify the model to use for junior tasks (default depends on --model)
-#AIDER_JUNIOR_MODEL=
+## Specify the model to use for editor tasks (default depends on --model)
+#AIDER_EDITOR_MODEL=
 
-## Specify the edit format for the junior model (default: depends on junior model)
-#AIDER_JUNIOR_EDIT_FORMAT=
+## Specify the edit format for the editor model (default: depends on editor model)
+#AIDER_EDITOR_EDIT_FORMAT=
 
 ## Only work with models that have meta-data available (default: True)
 #AIDER_SHOW_MODEL_WARNINGS=true
@@ -33,8 +33,8 @@ usage: aider [-h] [--openai-api-key] [--anthropic-api-key] [--model]
 [--openai-organization-id] [--model-settings-file]
 [--model-metadata-file]
 [--verify-ssl | --no-verify-ssl] [--edit-format]
-[--senior] [--weak-model] [--junior-model]
-[--junior-edit-format]
+[--architect] [--weak-model] [--editor-model]
+[--editor-edit-format]
 [--show-model-warnings | --no-show-model-warnings]
 [--max-chat-history-tokens] [--env-file]
 [--cache-prompts | --no-cache-prompts]
@@ -196,21 +196,21 @@ Aliases:
 - `--edit-format EDIT_FORMAT`
 - `--chat-mode EDIT_FORMAT`
 
-### `--senior`
-Use senior edit format for the main chat
-Environment variable: `AIDER_SENIOR`
+### `--architect`
+Use architect edit format for the main chat
+Environment variable: `AIDER_ARCHITECT`
 
 ### `--weak-model WEAK_MODEL`
 Specify the model to use for commit messages and chat history summarization (default depends on --model)
 Environment variable: `AIDER_WEAK_MODEL`
 
-### `--junior-model JUNIOR_MODEL`
-Specify the model to use for junior tasks (default depends on --model)
-Environment variable: `AIDER_JUNIOR_MODEL`
+### `--editor-model JUNIOR_MODEL`
+Specify the model to use for editor tasks (default depends on --model)
+Environment variable: `AIDER_EDITOR_MODEL`
 
-### `--junior-edit-format JUNIOR_EDIT_FORMAT`
-Specify the edit format for the junior model (default: depends on junior model)
-Environment variable: `AIDER_JUNIOR_EDIT_FORMAT`
+### `--editor-edit-format JUNIOR_EDIT_FORMAT`
+Specify the edit format for the editor model (default: depends on editor model)
+Environment variable: `AIDER_EDITOR_EDIT_FORMAT`
 
 ### `--show-model-warnings`
 Only work with models that have meta-data available (default: True)