Mirror of https://github.com/Aider-AI/aider.git (synced 2025-06-03 03:05:00 +00:00)

Commit 2cd9e57192: Merge branch 'main' of https://github.com/Aider-AI/aider

65 changed files with 1901 additions and 842 deletions

HISTORY.md (30 changes)

@@ -1,5 +1,35 @@
# Release history

### main branch

- Added support for Claude 3.7 Sonnet models on OpenRouter, Bedrock and Vertex AI.
- Aider wrote 47% of the code in this release.

### Aider v0.75.1

- Added support for `openrouter/anthropic/claude-3.7-sonnet`

### Aider v0.75.0

- Basic support for Claude 3.7 Sonnet
- Use `--model sonnet` to use the new 3.7
- Thinking support coming soon.
- Bugfix to `/editor` command.
- Aider wrote 46% of the code in this release.

### Aider v0.74.3

- Downgrade streamlit dependency to avoid threading bug.
- Added support for tree-sitter language pack.
- Added openrouter/o3-mini-high model configuration.
- Added build.gradle.kts to special files for Kotlin project support, by Lucas Shadler.

### Aider v0.74.2

- Prevent more than one cache warming thread from becoming active.
- Fixed continuation prompt ". " for multiline input.
- Added HCL (Terraform) syntax support, by Warren Krewenki.

### Aider v0.74.1

- Have o1 & o3-mini generate markdown by sending the magic "Formatting re-enabled." string.

@@ -1,6 +1,6 @@
from packaging import version

__version__ = "0.74.2.dev"
__version__ = "0.75.2.dev"
safe_version = __version__

try:

@@ -46,7 +46,7 @@ def get_parser(default_config_files, git_root):
const=opus_model,
help=f"Use {opus_model} model for the main chat",
)
sonnet_model = "claude-3-5-sonnet-20241022"
sonnet_model = "anthropic/claude-3-7-sonnet-20250219"
group.add_argument(
"--sonnet",
action="store_const",

@@ -823,6 +823,12 @@ def get_parser(default_config_files, git_root):
"--editor",
help="Specify which editor to use for the /editor command",
)
group.add_argument(
"--install-tree-sitter-language-pack",
action="store_true",
help="Install the tree_sitter_language_pack (experimental)",
default=False,
)

return parser

@@ -1207,6 +1207,7 @@ class Coder:
return

delay = 5 * 60 - 5
delay = float(os.environ.get("AIDER_CACHE_KEEPALIVE_DELAY", delay))
self.next_cache_warm = time.time() + delay
self.warming_pings_left = self.num_cache_warming_pings
self.cache_warming_chunks = chunks

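The change above makes the prompt-cache keepalive delay overridable through an environment variable. A minimal, self-contained sketch of that pattern, assuming only the `AIDER_CACHE_KEEPALIVE_DELAY` variable name from the diff; the function and its other names are illustrative:

```python
import os
import time


def next_cache_warm_time(default_delay: float = 5 * 60 - 5) -> float:
    # The default stays just under five minutes, presumably to keep the
    # provider's prompt cache warm; the env var lets users or tests change it.
    delay = float(os.environ.get("AIDER_CACHE_KEEPALIVE_DELAY", default_delay))
    return time.time() + delay
```
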
@@ -404,6 +404,7 @@ class Commands:

fence = "`" * 3

file_res = []
# files
for fname in self.coder.abs_fnames:
relative_fname = self.coder.get_rel_fname(fname)

@@ -414,7 +415,7 @@
# approximate
content = f"{relative_fname}\n{fence}\n" + content + "{fence}\n"
tokens = self.coder.main_model.token_count(content)
res.append((tokens, f"{relative_fname}", "/drop to remove"))
file_res.append((tokens, f"{relative_fname}", "/drop to remove"))

# read-only files
for fname in self.coder.abs_read_only_fnames:

@@ -424,7 +425,10 @@
# approximate
content = f"{relative_fname}\n{fence}\n" + content + "{fence}\n"
tokens = self.coder.main_model.token_count(content)
res.append((tokens, f"{relative_fname} (read-only)", "/drop to remove"))
file_res.append((tokens, f"{relative_fname} (read-only)", "/drop to remove"))

file_res.sort()
res.extend(file_res)

self.io.tool_output(
f"Approximate context window usage for {self.coder.main_model.name}, in tokens:"

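The `/tokens` hunks above switch from appending file rows directly to `res` to collecting them in `file_res`, sorting, and then extending `res`, so files are reported in token order. A small illustrative sketch of that pattern; the helper below is not aider's code, just the same idea in isolation:

```python
def file_token_report(files, read_only_files, count_tokens):
    """Collect per-file rows, sort by token count, then return them together."""
    file_res = []
    for name in files:
        file_res.append((count_tokens(name), name, "/drop to remove"))
    for name in read_only_files:
        file_res.append((count_tokens(name), f"{name} (read-only)", "/drop to remove"))
    # Sorting on the (tokens, name, ...) tuples lists the largest files last.
    file_res.sort()
    return file_res
```
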
@@ -10,12 +10,13 @@ This module provides functionality to:

import os
import platform
import shlex
import subprocess
import tempfile

from rich.console import Console

from aider.dump import dump # noqa

DEFAULT_EDITOR_NIX = "vi"
DEFAULT_EDITOR_OS_X = "vim"
DEFAULT_EDITOR_WINDOWS = "notepad"

@@ -87,13 +88,13 @@ def get_environment_editor(default=None):

def discover_editor(editor_override=None):
"""
Discovers and returns the appropriate editor command as a list of arguments.
Discovers and returns the appropriate editor command.

Handles cases where the editor command includes arguments, including quoted arguments
with spaces (e.g. 'vim -c "set noswapfile"').

:return: A list of command parts ready for subprocess execution
:rtype: list[str]
:return: The editor command as a string
:rtype: str
"""
system = platform.system()
if system == "Windows":

@@ -102,14 +103,13 @@ def discover_editor(editor_override=None):
default_editor = DEFAULT_EDITOR_OS_X
else:
default_editor = DEFAULT_EDITOR_NIX

if editor_override:
editor = editor_override
else:
editor = get_environment_editor(default_editor)
try:
return shlex.split(editor)
except ValueError as e:
raise RuntimeError(f"Invalid editor command format '{editor}': {e}")

return editor


def pipe_editor(input_data="", suffix=None, editor=None):

@@ -128,9 +128,10 @@ def pipe_editor(input_data="", suffix=None, editor=None):
:rtype: str
"""
filepath = write_temp_file(input_data, suffix)
command_parts = discover_editor(editor)
command_parts.append(filepath)
subprocess.call(command_parts)
command_str = discover_editor(editor)
command_str += " " + filepath

subprocess.call(command_str, shell=True)
with open(filepath, "r") as f:
output_data = f.read()
try:

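A rough standalone sketch of the reworked editor flow shown above: the editor command is now kept as a single string, the temp file path is appended, and the whole thing is handed to the shell. The function name and the `EDITOR`/`vi` fallback here are illustrative assumptions, not aider's exact API:

```python
import os
import subprocess
import tempfile


def edit_text_in_editor(initial_text: str, editor: str | None = None) -> str:
    # Write the initial text to a temp file the editor can open.
    fd, filepath = tempfile.mkstemp(suffix=".md")
    with os.fdopen(fd, "w") as f:
        f.write(initial_text)

    # Keep the command as one string so editors configured with embedded
    # arguments (e.g. 'vim -c "set noswapfile"') pass through unchanged.
    command_str = editor or os.environ.get("EDITOR", "vi")
    subprocess.call(f"{command_str} {filepath}", shell=True)

    with open(filepath, "r") as f:
        return f.read()
```

Using `shell=True` trades `shlex`-style argument splitting for simpler handling of editor commands that carry their own arguments.
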
@@ -8,7 +8,7 @@ from dataclasses import dataclass
from pathlib import Path

from grep_ast import TreeContext, filename_to_lang
from tree_sitter_languages import get_parser # noqa: E402
from grep_ast.tsl import get_parser # noqa: E402

from aider.dump import dump # noqa: F401
from aider.run_cmd import run_cmd_subprocess # noqa: F401

@@ -214,6 +214,18 @@ def check_streamlit_install(io):
)


def install_tree_sitter_language_pack(io):
return utils.check_pip_install_extra(
io,
"tree_sitter_language_pack",
"Install tree_sitter_language_pack?",
[
"tree-sitter-language-pack==0.4.0",
"tree-sitter==0.24.0",
],
)


def write_streamlit_credentials():
from streamlit.file_util import get_streamlit_file_path

@@ -706,6 +718,11 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
analytics.event("exit", reason="Upgrade completed")
return 0 if success else 1

if args.install_tree_sitter_language_pack:
success = install_tree_sitter_language_pack(io)
analytics.event("exit", reason="Install TSLP completed")
return 0 if success else 1

if args.check_update:
check_version(io, verbose=args.verbose)

@@ -1060,7 +1077,7 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F

while True:
try:
coder.ok_to_warm_cache = True
coder.ok_to_warm_cache = bool(args.cache_keepalive_pings)
coder.run()
analytics.event("exit", reason="Completed main CLI coder.run")
return

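The new `--install-tree-sitter-language-pack` flag funnels through `utils.check_pip_install_extra`, which prompts before pip-installing optional packages. A simplified sketch of that install-on-demand pattern; the helper below is an illustrative stand-in, not the real `check_pip_install_extra`:

```python
import subprocess
import sys


def install_optional_packages(prompt: str, pip_specs: list[str]) -> bool:
    """Ask first, then pip-install optional packages into the current env."""
    answer = input(f"{prompt} [y/N] ").strip().lower()
    if answer not in ("y", "yes"):
        return False
    # Use the running interpreter's pip so the packages land in the same env.
    cmd = [sys.executable, "-m", "pip", "install", *pip_specs]
    return subprocess.call(cmd) == 0


# The pinned versions come from the diff above:
# install_optional_packages(
#     "Install tree_sitter_language_pack?",
#     ["tree-sitter-language-pack==0.4.0", "tree-sitter==0.24.0"],
# )
```
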
@@ -76,7 +76,7 @@ ANTHROPIC_MODELS = [ln.strip() for ln in ANTHROPIC_MODELS.splitlines() if ln.str
# Mapping of model aliases to their canonical names
MODEL_ALIASES = {
# Claude models
"sonnet": "claude-3-5-sonnet-20241022",
"sonnet": "anthropic/claude-3-7-sonnet-20250219",
"haiku": "claude-3-5-haiku-20241022",
"opus": "claude-3-opus-20240229",
# GPT models

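With this change the `sonnet` alias points at the 3.7 model. A tiny sketch of how such an alias table is typically consulted before a model is loaded; only the alias and model strings come from the diff, the lookup function is illustrative:

```python
MODEL_ALIASES = {
    "sonnet": "anthropic/claude-3-7-sonnet-20250219",
    "haiku": "claude-3-5-haiku-20241022",
    "opus": "claude-3-opus-20240229",
}


def resolve_model_name(name: str) -> str:
    # Unknown names pass through unchanged, so full model IDs still work.
    return MODEL_ALIASES.get(name, name)


assert resolve_model_name("sonnet") == "anthropic/claude-3-7-sonnet-20250219"
```
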
aider/queries/tree-sitter-language-pack/javascript-tags.scm (new file, 88 lines)

@@ -0,0 +1,88 @@
|
|||
(
|
||||
(comment)* @doc
|
||||
.
|
||||
(method_definition
|
||||
name: (property_identifier) @name.definition.method) @definition.method
|
||||
(#not-eq? @name.definition.method "constructor")
|
||||
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
|
||||
(#select-adjacent! @doc @definition.method)
|
||||
)
|
||||
|
||||
(
|
||||
(comment)* @doc
|
||||
.
|
||||
[
|
||||
(class
|
||||
name: (_) @name.definition.class)
|
||||
(class_declaration
|
||||
name: (_) @name.definition.class)
|
||||
] @definition.class
|
||||
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
|
||||
(#select-adjacent! @doc @definition.class)
|
||||
)
|
||||
|
||||
(
|
||||
(comment)* @doc
|
||||
.
|
||||
[
|
||||
(function_expression
|
||||
name: (identifier) @name.definition.function)
|
||||
(function_declaration
|
||||
name: (identifier) @name.definition.function)
|
||||
(generator_function
|
||||
name: (identifier) @name.definition.function)
|
||||
(generator_function_declaration
|
||||
name: (identifier) @name.definition.function)
|
||||
] @definition.function
|
||||
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
|
||||
(#select-adjacent! @doc @definition.function)
|
||||
)
|
||||
|
||||
(
|
||||
(comment)* @doc
|
||||
.
|
||||
(lexical_declaration
|
||||
(variable_declarator
|
||||
name: (identifier) @name.definition.function
|
||||
value: [(arrow_function) (function_expression)]) @definition.function)
|
||||
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
|
||||
(#select-adjacent! @doc @definition.function)
|
||||
)
|
||||
|
||||
(
|
||||
(comment)* @doc
|
||||
.
|
||||
(variable_declaration
|
||||
(variable_declarator
|
||||
name: (identifier) @name.definition.function
|
||||
value: [(arrow_function) (function_expression)]) @definition.function)
|
||||
(#strip! @doc "^[\\s\\*/]+|^[\\s\\*/]$")
|
||||
(#select-adjacent! @doc @definition.function)
|
||||
)
|
||||
|
||||
(assignment_expression
|
||||
left: [
|
||||
(identifier) @name.definition.function
|
||||
(member_expression
|
||||
property: (property_identifier) @name.definition.function)
|
||||
]
|
||||
right: [(arrow_function) (function_expression)]
|
||||
) @definition.function
|
||||
|
||||
(pair
|
||||
key: (property_identifier) @name.definition.function
|
||||
value: [(arrow_function) (function_expression)]) @definition.function
|
||||
|
||||
(
|
||||
(call_expression
|
||||
function: (identifier) @name.reference.call) @reference.call
|
||||
(#not-match? @name.reference.call "^(require)$")
|
||||
)
|
||||
|
||||
(call_expression
|
||||
function: (member_expression
|
||||
property: (property_identifier) @name.reference.call)
|
||||
arguments: (_) @reference.call)
|
||||
|
||||
(new_expression
|
||||
constructor: (_) @name.reference.class) @reference.class
|
|
@@ -23,7 +23,7 @@ from aider.utils import Spinner

# tree_sitter is throwing a FutureWarning
warnings.simplefilter("ignore", category=FutureWarning)
from tree_sitter_languages import get_language, get_parser # noqa: E402
from grep_ast.tsl import USING_TSL_PACK, get_language, get_parser # noqa: E402

Tag = namedtuple("Tag", "rel_fname fname line name kind".split())

@@ -31,8 +31,12 @@ Tag = namedtuple("Tag", "rel_fname fname line name kind".split())
SQLITE_ERRORS = (sqlite3.OperationalError, sqlite3.DatabaseError, OSError)


CACHE_VERSION = 3
if USING_TSL_PACK:
CACHE_VERSION = 4


class RepoMap:
CACHE_VERSION = 3
TAGS_CACHE_DIR = f".aider.tags.cache.v{CACHE_VERSION}"

warned_files = set()

@@ -282,10 +286,15 @@ class RepoMap:
query = language.query(query_scm)
captures = query.captures(tree.root_node)

captures = list(captures)

saw = set()
for node, tag in captures:
if USING_TSL_PACK:
all_nodes = []
for tag, nodes in captures.items():
all_nodes += [(node, tag) for node in nodes]
else:
all_nodes = list(captures)

for node, tag in all_nodes:
if tag.startswith("name.definition."):
kind = "def"
elif tag.startswith("name.reference."):

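The hunk above has to handle two tree-sitter query APIs: with the tree-sitter language pack, `query.captures()` returns a dict mapping tag names to lists of nodes, while the older `tree-sitter-languages` path yields a list of `(node, tag)` tuples. A small self-contained sketch of that normalization, with plain strings standing in for parser nodes:

```python
def normalize_captures(captures, using_tsl_pack: bool):
    """Return a flat list of (node, tag) pairs regardless of the API shape."""
    if using_tsl_pack:
        # Dict form: {"name.definition.function": [node_a, node_b], ...}
        all_nodes = []
        for tag, nodes in captures.items():
            all_nodes += [(node, tag) for node in nodes]
        return all_nodes
    # List form: already [(node, tag), ...]
    return list(captures)


print(normalize_captures({"name.definition.function": ["node_a"]}, True))
print(normalize_captures([("node_a", "name.definition.function")], False))
```
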
@@ -732,8 +741,27 @@ def get_random_color():

def get_scm_fname(lang):
# Load the tags queries
if USING_TSL_PACK:
subdir = "tree-sitter-language-pack"
try:
path = resources.files(__package__).joinpath(
"queries",
subdir,
f"{lang}-tags.scm",
)
if path.exists():
return path
except KeyError:
pass

# Fall back to tree-sitter-languages
subdir = "tree-sitter-languages"
try:
return resources.files(__package__).joinpath("queries", f"tree-sitter-{lang}-tags.scm")
return resources.files(__package__).joinpath(
"queries",
subdir,
f"{lang}-tags.scm",
)
except KeyError:
return

@ -97,6 +97,22 @@
|
|||
"supports_system_messages": true,
|
||||
"supports_response_schema": true
|
||||
},
|
||||
"openrouter/openai/o3-mini-high": {
|
||||
"max_tokens": 100000,
|
||||
"max_input_tokens": 200000,
|
||||
"max_output_tokens": 100000,
|
||||
"input_cost_per_token": 0.0000011,
|
||||
"output_cost_per_token": 0.0000044,
|
||||
"cache_read_input_token_cost": 0.00000055,
|
||||
"litellm_provider": "openrouter",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_vision": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_system_messages": true,
|
||||
"supports_response_schema": true
|
||||
},
|
||||
"openrouter/openai/gpt-4o-mini": {
|
||||
"max_tokens": 16384,
|
||||
"max_input_tokens": 128000,
|
||||
|
@ -115,4 +131,98 @@
|
|||
"supports_prompt_caching": true,
|
||||
"supports_system_messages": true
|
||||
},
|
||||
"claude-3-7-sonnet-20250219": {
|
||||
"max_tokens": 8192,
|
||||
"max_input_tokens": 200000,
|
||||
"max_output_tokens": 8192,
|
||||
"input_cost_per_token": 0.000003,
|
||||
"output_cost_per_token": 0.000015,
|
||||
"cache_creation_input_token_cost": 0.00000375,
|
||||
"cache_read_input_token_cost": 0.0000003,
|
||||
"litellm_provider": "anthropic",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_vision": true,
|
||||
"tool_use_system_prompt_tokens": 159,
|
||||
"supports_assistant_prefill": true,
|
||||
"supports_pdf_input": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_response_schema": true,
|
||||
"deprecation_date": "2025-10-01",
|
||||
"supports_tool_choice": true
|
||||
},
|
||||
"anthropic/claude-3-7-sonnet-20250219": {
|
||||
"max_tokens": 8192,
|
||||
"max_input_tokens": 200000,
|
||||
"max_output_tokens": 8192,
|
||||
"input_cost_per_token": 0.000003,
|
||||
"output_cost_per_token": 0.000015,
|
||||
"cache_creation_input_token_cost": 0.00000375,
|
||||
"cache_read_input_token_cost": 0.0000003,
|
||||
"litellm_provider": "anthropic",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_vision": true,
|
||||
"tool_use_system_prompt_tokens": 159,
|
||||
"supports_assistant_prefill": true,
|
||||
"supports_pdf_input": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_response_schema": true,
|
||||
"deprecation_date": "2025-10-01",
|
||||
"supports_tool_choice": true
|
||||
},
|
||||
"openrouter/anthropic/claude-3.7-sonnet": {
|
||||
"max_tokens": 8192,
|
||||
"max_input_tokens": 200000,
|
||||
"max_output_tokens": 8192,
|
||||
"input_cost_per_token": 0.000003,
|
||||
"output_cost_per_token": 0.000015,
|
||||
"cache_creation_input_token_cost": 0.00000375,
|
||||
"cache_read_input_token_cost": 0.0000003,
|
||||
"litellm_provider": "openrouter",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_vision": true,
|
||||
"tool_use_system_prompt_tokens": 159,
|
||||
"supports_assistant_prefill": true,
|
||||
"supports_pdf_input": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_response_schema": true,
|
||||
"deprecation_date": "2025-10-01",
|
||||
"supports_tool_choice": true
|
||||
},
|
||||
"gpt-4.5-preview": {
|
||||
"max_tokens": 16384,
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"input_cost_per_token": 0.000075,
|
||||
"output_cost_per_token": 0.00015,
|
||||
"cache_read_input_token_cost": 0.0000375,
|
||||
"litellm_provider": "openai",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_response_schema": true,
|
||||
"supports_vision": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true
|
||||
},
|
||||
"openai/gpt-4.5-preview": {
|
||||
"max_tokens": 16384,
|
||||
"max_input_tokens": 128000,
|
||||
"max_output_tokens": 16384,
|
||||
"input_cost_per_token": 0.000075,
|
||||
"output_cost_per_token": 0.00015,
|
||||
"cache_read_input_token_cost": 0.0000375,
|
||||
"litellm_provider": "openai",
|
||||
"mode": "chat",
|
||||
"supports_function_calling": true,
|
||||
"supports_parallel_function_calling": true,
|
||||
"supports_response_schema": true,
|
||||
"supports_vision": true,
|
||||
"supports_prompt_caching": true,
|
||||
"supports_system_messages": true,
|
||||
"supports_tool_choice": true
|
||||
},
|
||||
}
|
||||
|
|
|
@ -184,6 +184,156 @@
|
|||
editor_model_name: anthropic/claude-3-5-sonnet-20241022
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: anthropic/claude-3-7-sonnet-20250219
|
||||
edit_format: diff
|
||||
weak_model_name: anthropic/claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: anthropic/claude-3-7-sonnet-20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: anthropic/claude-3-7-sonnet-latest
|
||||
edit_format: diff
|
||||
weak_model_name: anthropic/claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: anthropic/claude-3-7-sonnet-latest
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-7-sonnet-20250219
|
||||
edit_format: diff
|
||||
weak_model_name: claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: claude-3-7-sonnet-20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-7-sonnet-latest
|
||||
edit_format: diff
|
||||
weak_model_name: claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: claude-3-7-sonnet-latest
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock/anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock_converse/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock_converse/anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock_converse/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock_converse/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock_converse/us.anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock_converse/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai/claude-3-7-sonnet@20250219
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
max_tokens: 64000
|
||||
editor_model_name: vertex_ai/claude-3-7-sonnet@20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai-anthropic_models/vertex_ai/claude-3-7-sonnet@20250219
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
max_tokens: 64000
|
||||
editor_model_name: vertex_ai-anthropic_models/vertex_ai/claude-3-7-sonnet@20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openrouter/anthropic/claude-3.7-sonnet
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/anthropic/claude-3-5-haiku
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: openrouter/anthropic/claude-3.7-sonnet
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openrouter/anthropic/claude-3.7-sonnet:beta
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/anthropic/claude-3-5-haiku
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: openrouter/anthropic/claude-3.7-sonnet
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock/anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
|
@ -657,6 +807,15 @@
|
|||
editor_edit_format: editor-diff
|
||||
system_prompt_prefix: "Formatting re-enabled. "
|
||||
|
||||
- name: openrouter/openai/o3-mini-high
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/openai/gpt-4o-mini
|
||||
use_repo_map: true
|
||||
use_temperature: false
|
||||
editor_model_name: openrouter/openai/gpt-4o
|
||||
editor_edit_format: editor-diff
|
||||
system_prompt_prefix: "Formatting re-enabled. "
|
||||
|
||||
- name: azure/o3-mini
|
||||
edit_format: diff
|
||||
weak_model_name: azure/gpt-4o-mini
|
||||
|
@ -666,4 +825,23 @@
|
|||
editor_edit_format: editor-diff
|
||||
system_prompt_prefix: "Formatting re-enabled. "
|
||||
|
||||
- name: gpt-4.5-preview
|
||||
edit_format: diff
|
||||
weak_model_name: gpt-4o-mini
|
||||
use_repo_map: true
|
||||
lazy: true
|
||||
reminder: sys
|
||||
examples_as_sys_msg: true
|
||||
editor_model_name: gpt-4o
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openai/gpt-4.5-preview
|
||||
edit_format: diff
|
||||
weak_model_name: gpt-4o-mini
|
||||
use_repo_map: true
|
||||
lazy: true
|
||||
reminder: sys
|
||||
examples_as_sys_msg: true
|
||||
editor_model_name: openai/gpt-4o
|
||||
editor_edit_format: editor-diff
|
||||
|
|
@@ -41,6 +41,7 @@ ROOT_IMPORTANT_FILES = [
"composer.lock",
"pom.xml",
"build.gradle",
"build.gradle.kts",
"build.sbt",
"go.mod",
"go.sum",

@@ -23,6 +23,36 @@ cog.out(text)
]]]-->


### main branch

- Added support for Claude 3.7 Sonnet models on OpenRouter, Bedrock and Vertex AI.
- Aider wrote 47% of the code in this release.

### Aider v0.75.1

- Added support for `openrouter/anthropic/claude-3.7-sonnet`

### Aider v0.75.0

- Basic support for Claude 3.7 Sonnet
- Use `--model sonnet` to use the new 3.7
- Thinking support coming soon.
- Bugfix to `/editor` command.
- Aider wrote 46% of the code in this release.

### Aider v0.74.3

- Downgrade streamlit dependency to avoid threading bug.
- Added support for tree-sitter language pack.
- Added openrouter/o3-mini-high model configuration.
- Added build.gradle.kts to special files for Kotlin project support, by Lucas Shadler.

### Aider v0.74.2

- Prevent more than one cache warming thread from becoming active.
- Fixed continuation prompt ". " for multiline input.
- Added HCL (Terraform) syntax support, by Warren Krewenki.

### Aider v0.74.1

- Have o1 & o3-mini generate markdown by sending the magic "Formatting re-enabled." string.

@ -3844,3 +3844,73 @@
|
|||
"Viktor Sz\xE9pe": 3
|
||||
start_tag: v0.73.0
|
||||
total_lines: 783
|
||||
- aider_percentage: 46.31
|
||||
aider_total: 163
|
||||
end_date: '2025-02-24'
|
||||
end_tag: v0.75.0
|
||||
file_counts:
|
||||
aider/__init__.py:
|
||||
Paul Gauthier: 1
|
||||
aider/args.py:
|
||||
Paul Gauthier: 7
|
||||
aider/coders/base_coder.py:
|
||||
Paul Gauthier: 12
|
||||
Paul Gauthier (aider): 4
|
||||
aider/commands.py:
|
||||
FeepingCreature (aider): 6
|
||||
aider/editor.py:
|
||||
Paul Gauthier: 7
|
||||
Paul Gauthier (aider): 5
|
||||
aider/io.py:
|
||||
Paul Gauthier: 3
|
||||
Paul Gauthier (aider): 4
|
||||
aider/linter.py:
|
||||
Paul Gauthier: 1
|
||||
aider/main.py:
|
||||
Paul Gauthier: 16
|
||||
aider/models.py:
|
||||
Paul Gauthier: 4
|
||||
aider/queries/tree-sitter-language-pack/javascript-tags.scm:
|
||||
Paul Gauthier: 5
|
||||
aider/queries/tree-sitter-languages/hcl-tags.scm:
|
||||
Paul Gauthier: 3
|
||||
Warren Krewenki: 74
|
||||
aider/queries/tree-sitter-languages/javascript-tags.scm:
|
||||
Paul Gauthier: 5
|
||||
aider/repomap.py:
|
||||
Paul Gauthier: 43
|
||||
Paul Gauthier (aider): 11
|
||||
aider/special.py:
|
||||
Lucas Shadler: 1
|
||||
aider/website/docs/leaderboards/index.md:
|
||||
Paul Gauthier: 1
|
||||
benchmark/Dockerfile:
|
||||
Paul Gauthier (aider): 1
|
||||
benchmark/benchmark.py:
|
||||
Paul Gauthier: 4
|
||||
benchmark/cpp-test.sh:
|
||||
Paul Gauthier: 1
|
||||
scripts/blame.py:
|
||||
Paul Gauthier (aider): 2
|
||||
scripts/issues.py:
|
||||
Paul Gauthier (aider): 17
|
||||
tests/basic/test_coder.py:
|
||||
Paul Gauthier (aider): 18
|
||||
tests/basic/test_editor.py:
|
||||
Antti Kaihola: 1
|
||||
Paul Gauthier (aider): 41
|
||||
tests/basic/test_models.py:
|
||||
Paul Gauthier (aider): 1
|
||||
tests/basic/test_repomap.py:
|
||||
Paul Gauthier (aider): 1
|
||||
tests/fixtures/languages/hcl/test.tf:
|
||||
Paul Gauthier (aider): 52
|
||||
grand_total:
|
||||
Antti Kaihola: 1
|
||||
FeepingCreature (aider): 6
|
||||
Lucas Shadler: 1
|
||||
Paul Gauthier: 113
|
||||
Paul Gauthier (aider): 157
|
||||
Warren Krewenki: 74
|
||||
start_tag: v0.74.0
|
||||
total_lines: 352
|
||||
|
|
|
@ -1,3 +1,29 @@
|
|||
- dirname: 2025-02-25-20-23-07--gemini-pro
|
||||
test_cases: 225
|
||||
model: gemini/gemini-2.0-pro-exp-02-05
|
||||
edit_format: whole
|
||||
commit_hash: 2fccd47
|
||||
pass_rate_1: 20.4
|
||||
pass_rate_2: 35.6
|
||||
pass_num_1: 46
|
||||
pass_num_2: 80
|
||||
percent_cases_well_formed: 100.0
|
||||
error_outputs: 430
|
||||
num_malformed_responses: 0
|
||||
num_with_malformed_responses: 0
|
||||
user_asks: 13
|
||||
lazy_comments: 0
|
||||
syntax_errors: 0
|
||||
indentation_errors: 0
|
||||
exhausted_context_windows: 0
|
||||
test_timeouts: 5
|
||||
total_tests: 225
|
||||
command: aider --model gemini/gemini-2.0-pro-exp-02-05
|
||||
date: 2025-02-25
|
||||
versions: 0.75.2.dev
|
||||
seconds_per_case: 34.8
|
||||
total_cost: 0.0000
|
||||
|
||||
- dirname: 2024-12-21-18-41-18--polyglot-gpt-4o-mini
|
||||
test_cases: 225
|
||||
model: gpt-4o-mini-2024-07-18
|
||||
|
@ -543,4 +569,108 @@
|
|||
date: 2025-01-21
|
||||
versions: 0.72.2.dev
|
||||
seconds_per_case: 24.2
|
||||
total_cost: 0.0000
|
||||
total_cost: 0.0000
|
||||
|
||||
- dirname: 2025-02-15-19-51-22--chatgpt4o-feb15-diff
|
||||
test_cases: 223
|
||||
model: chatgpt-4o-latest (2025-02-15)
|
||||
edit_format: diff
|
||||
commit_hash: 108ce18-dirty
|
||||
pass_rate_1: 9.0
|
||||
pass_rate_2: 27.1
|
||||
pass_num_1: 20
|
||||
pass_num_2: 61
|
||||
percent_cases_well_formed: 93.3
|
||||
error_outputs: 66
|
||||
num_malformed_responses: 21
|
||||
num_with_malformed_responses: 15
|
||||
user_asks: 57
|
||||
lazy_comments: 0
|
||||
syntax_errors: 0
|
||||
indentation_errors: 0
|
||||
exhausted_context_windows: 0
|
||||
test_timeouts: 2
|
||||
total_tests: 225
|
||||
command: aider --model chatgpt-4o-latest
|
||||
date: 2025-02-15
|
||||
versions: 0.74.3.dev
|
||||
seconds_per_case: 12.4
|
||||
total_cost: 14.3703
|
||||
|
||||
- dirname: 2025-02-24-19-54-07--sonnet37-diff
|
||||
test_cases: 225
|
||||
model: claude-3-7-sonnet-20250219 (no thinking)
|
||||
edit_format: diff
|
||||
commit_hash: 75e9ee6
|
||||
pass_rate_1: 24.4
|
||||
pass_rate_2: 60.4
|
||||
pass_num_1: 55
|
||||
pass_num_2: 136
|
||||
percent_cases_well_formed: 93.3
|
||||
error_outputs: 16
|
||||
num_malformed_responses: 16
|
||||
num_with_malformed_responses: 15
|
||||
user_asks: 12
|
||||
lazy_comments: 0
|
||||
syntax_errors: 0
|
||||
indentation_errors: 0
|
||||
exhausted_context_windows: 0
|
||||
test_timeouts: 0
|
||||
total_tests: 225
|
||||
command: aider --model sonnet
|
||||
date: 2025-02-24
|
||||
versions: 0.74.4.dev
|
||||
seconds_per_case: 28.3
|
||||
total_cost: 17.7191
|
||||
|
||||
- dirname: 2025-02-24-21-47-23--sonnet37-diff-think-32k-64k
|
||||
test_cases: 225
|
||||
model: claude-3-7-sonnet-20250219 (32k thinking tokens)
|
||||
edit_format: diff
|
||||
commit_hash: 60d11a6, 93edbda
|
||||
pass_rate_1: 29.3
|
||||
pass_rate_2: 64.9
|
||||
pass_num_1: 66
|
||||
pass_num_2: 146
|
||||
percent_cases_well_formed: 97.8
|
||||
error_outputs: 66
|
||||
num_malformed_responses: 5
|
||||
num_with_malformed_responses: 5
|
||||
user_asks: 5
|
||||
lazy_comments: 0
|
||||
syntax_errors: 0
|
||||
indentation_errors: 0
|
||||
exhausted_context_windows: 0
|
||||
test_timeouts: 1
|
||||
total_tests: 225
|
||||
command: "aider --model anthropic/claude-3-7-sonnet-20250219 # plus yml config"
|
||||
date: 2025-02-24
|
||||
versions: 0.75.1.dev
|
||||
seconds_per_case: 105.2
|
||||
total_cost: 36.8343
|
||||
|
||||
- dirname: 2025-02-27-20-26-15--gpt45-diff3
|
||||
test_cases: 224
|
||||
model: gpt-4.5-preview
|
||||
edit_format: diff
|
||||
commit_hash: b462e55-dirty
|
||||
pass_rate_1: 22.3
|
||||
pass_rate_2: 44.9
|
||||
pass_num_1: 50
|
||||
pass_num_2: 101
|
||||
percent_cases_well_formed: 97.3
|
||||
error_outputs: 10
|
||||
num_malformed_responses: 8
|
||||
num_with_malformed_responses: 6
|
||||
user_asks: 15
|
||||
lazy_comments: 0
|
||||
syntax_errors: 0
|
||||
indentation_errors: 0
|
||||
exhausted_context_windows: 1
|
||||
test_timeouts: 2
|
||||
total_tests: 225
|
||||
command: aider --model openai/gpt-4.5-preview
|
||||
date: 2025-02-27
|
||||
versions: 0.75.2.dev
|
||||
seconds_per_case: 113.5
|
||||
total_cost: 183.1802
|
|
@ -23,6 +23,16 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
return (label && label.includes(HIGHLIGHT_MODEL)) ? 'rgba(255, 99, 132, 1)' : 'rgba(54, 162, 235, 1)';
|
||||
},
|
||||
borderWidth: 1
|
||||
}, {
|
||||
label: 'Total Cost ($)',
|
||||
data: [],
|
||||
type: 'scatter',
|
||||
yAxisID: 'y1',
|
||||
backgroundColor: 'rgba(153, 102, 255, 1)',
|
||||
borderColor: '#fff',
|
||||
borderWidth: 1,
|
||||
pointRadius: 5,
|
||||
pointHoverRadius: 7
|
||||
}]
|
||||
};
|
||||
|
||||
|
@ -32,7 +42,8 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
model: '{{ row.model }}',
|
||||
pass_rate: {{ row[pass_rate_field] }},
|
||||
percent_cases_well_formed: {{ row.percent_cases_well_formed }},
|
||||
edit_format: '{{ row.edit_format | default: "diff" }}'
|
||||
edit_format: '{{ row.edit_format | default: "diff" }}',
|
||||
total_cost: {{ row.total_cost | default: 0 }}
|
||||
});
|
||||
{% endfor %}
|
||||
|
||||
|
@ -43,6 +54,7 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
displayedData = [];
|
||||
leaderboardData.labels = [];
|
||||
leaderboardData.datasets[0].data = [];
|
||||
leaderboardData.datasets[1].data = [];
|
||||
|
||||
allData.forEach(function(row, index) {
|
||||
var rowElement = document.getElementById('edit-row-' + index);
|
||||
|
@ -53,6 +65,8 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
displayedData.push(row);
|
||||
leaderboardData.labels.push(row.model);
|
||||
leaderboardData.datasets[0].data.push(row.pass_rate);
|
||||
// Only include cost if it's not zero (placeholder for unknown)
|
||||
leaderboardData.datasets[1].data.push(row.total_cost > 0 ? row.total_cost : null);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -111,10 +125,29 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
fillStyle: blueDiagonalPattern,
|
||||
strokeStyle: 'rgba(54, 162, 235, 1)',
|
||||
lineWidth: 1
|
||||
},
|
||||
{
|
||||
text: 'Total Cost ($)',
|
||||
fillStyle: 'rgba(153, 102, 255, 1)',
|
||||
strokeStyle: '#fff',
|
||||
lineWidth: 1,
|
||||
pointStyle: 'circle'
|
||||
}
|
||||
];
|
||||
}
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
callbacks: {
|
||||
label: function(context) {
|
||||
const datasetLabel = context.dataset.label || '';
|
||||
const value = context.parsed.y;
|
||||
if (datasetLabel === 'Total Cost ($)') {
|
||||
return datasetLabel + ': $' + value.toFixed(2);
|
||||
}
|
||||
return datasetLabel + ': ' + value.toFixed(1) + '%';
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
scales: {
|
||||
|
@ -125,6 +158,17 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
text: 'Percent completed correctly'
|
||||
}
|
||||
},
|
||||
y1: {
|
||||
beginAtZero: true,
|
||||
position: 'right',
|
||||
grid: {
|
||||
drawOnChartArea: false
|
||||
},
|
||||
title: {
|
||||
display: true,
|
||||
text: 'Total Cost ($)'
|
||||
}
|
||||
},
|
||||
x: {
|
||||
ticks: {
|
||||
callback: function(value, index) {
|
||||
|
@ -173,6 +217,7 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
displayedData = [];
|
||||
leaderboardData.labels = [];
|
||||
leaderboardData.datasets[0].data = [];
|
||||
leaderboardData.datasets[1].data = [];
|
||||
|
||||
for (var i = 0; i < rows.length; i++) {
|
||||
var rowText = rows[i].textContent;
|
||||
|
@ -181,6 +226,8 @@ document.addEventListener('DOMContentLoaded', function () {
|
|||
displayedData.push(allData[i]);
|
||||
leaderboardData.labels.push(allData[i].model);
|
||||
leaderboardData.datasets[0].data.push(allData[i].pass_rate);
|
||||
// Only include cost if it's not zero (placeholder for unknown)
|
||||
leaderboardData.datasets[1].data.push(allData[i].total_cost > 0 ? allData[i].total_cost : null);
|
||||
} else {
|
||||
rows[i].style.display = 'none';
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
@@ -23,7 +23,7 @@
## Use claude-3-opus-20240229 model for the main chat
#opus: false

## Use claude-3-5-sonnet-20241022 model for the main chat
## Use anthropic/claude-3-7-sonnet-20250219 model for the main chat
#sonnet: false

## Use claude-3-5-haiku-20241022 model for the main chat

@@ -436,3 +436,6 @@

## Specify which editor to use for the /editor command
#editor: xxx

## Install the tree_sitter_language_pack (experimental)
#install-tree-sitter-language-pack: false

@@ -27,7 +27,7 @@
## Use claude-3-opus-20240229 model for the main chat
#AIDER_OPUS=

## Use claude-3-5-sonnet-20241022 model for the main chat
## Use anthropic/claude-3-7-sonnet-20250219 model for the main chat
#AIDER_SONNET=

## Use claude-3-5-haiku-20241022 model for the main chat

@@ -404,3 +404,6 @@

## Specify which editor to use for the /editor command
#AIDER_EDITOR=

## Install the tree_sitter_language_pack (experimental)
#AIDER_INSTALL_TREE_SITTER_LANGUAGE_PACK=false

@ -223,6 +223,32 @@ cog.out("```\n")
|
|||
editor_model_name: anthropic/claude-3-5-sonnet-20241022
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: anthropic/claude-3-7-sonnet-20250219
|
||||
edit_format: diff
|
||||
weak_model_name: anthropic/claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: anthropic/claude-3-7-sonnet-20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: anthropic/claude-3-7-sonnet-latest
|
||||
edit_format: diff
|
||||
weak_model_name: anthropic/claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: anthropic/claude-3-7-sonnet-latest
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: anthropic/claude-3-haiku-20240307
|
||||
weak_model_name: anthropic/claude-3-haiku-20240307
|
||||
examples_as_sys_msg: true
|
||||
|
@ -288,6 +314,58 @@ cog.out("```\n")
|
|||
editor_model_name: bedrock/anthropic.claude-3-5-sonnet-20241022-v2:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock/anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock/us.anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock_converse/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock_converse/anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock_converse/anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: bedrock_converse/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
edit_format: diff
|
||||
weak_model_name: bedrock_converse/us.anthropic.claude-3-5-haiku-20241022-v1:0
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: bedrock_converse/us.anthropic.claude-3-7-sonnet-20250219-v1:0
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-5-haiku-20241022
|
||||
edit_format: diff
|
||||
weak_model_name: claude-3-5-haiku-20241022
|
||||
|
@ -324,6 +402,32 @@ cog.out("```\n")
|
|||
editor_model_name: claude-3-5-sonnet-20241022
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-7-sonnet-20250219
|
||||
edit_format: diff
|
||||
weak_model_name: claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: claude-3-7-sonnet-20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-7-sonnet-latest
|
||||
edit_format: diff
|
||||
weak_model_name: claude-3-5-haiku-20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: claude-3-7-sonnet-latest
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: claude-3-haiku-20240307
|
||||
weak_model_name: claude-3-haiku-20240307
|
||||
examples_as_sys_msg: true
|
||||
|
@ -696,6 +800,32 @@ cog.out("```\n")
|
|||
editor_model_name: openrouter/anthropic/claude-3.5-sonnet:beta
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openrouter/anthropic/claude-3.7-sonnet
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/anthropic/claude-3-5-haiku
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: openrouter/anthropic/claude-3.7-sonnet
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openrouter/anthropic/claude-3.7-sonnet:beta
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/anthropic/claude-3-5-haiku
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
extra_headers:
|
||||
anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
|
||||
max_tokens: 64000
|
||||
cache_control: true
|
||||
editor_model_name: openrouter/anthropic/claude-3.7-sonnet
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: openrouter/deepseek/deepseek-chat
|
||||
edit_format: diff
|
||||
use_repo_map: true
|
||||
|
@ -796,6 +926,15 @@ cog.out("```\n")
|
|||
editor_edit_format: editor-diff
|
||||
system_prompt_prefix: 'Formatting re-enabled. '
|
||||
|
||||
- name: openrouter/openai/o3-mini-high
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/openai/gpt-4o-mini
|
||||
use_repo_map: true
|
||||
use_temperature: false
|
||||
editor_model_name: openrouter/openai/gpt-4o
|
||||
editor_edit_format: editor-diff
|
||||
system_prompt_prefix: 'Formatting re-enabled. '
|
||||
|
||||
- name: openrouter/qwen/qwen-2.5-coder-32b-instruct
|
||||
edit_format: diff
|
||||
weak_model_name: openrouter/qwen/qwen-2.5-coder-32b-instruct
|
||||
|
@ -803,6 +942,16 @@ cog.out("```\n")
|
|||
editor_model_name: openrouter/qwen/qwen-2.5-coder-32b-instruct
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai-anthropic_models/vertex_ai/claude-3-7-sonnet@20250219
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
max_tokens: 64000
|
||||
editor_model_name: vertex_ai-anthropic_models/vertex_ai/claude-3-7-sonnet@20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai/claude-3-5-haiku@20241022
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
|
@ -830,6 +979,16 @@ cog.out("```\n")
|
|||
editor_model_name: vertex_ai/claude-3-5-sonnet@20240620
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai/claude-3-7-sonnet@20250219
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
use_repo_map: true
|
||||
examples_as_sys_msg: true
|
||||
extra_params:
|
||||
max_tokens: 64000
|
||||
editor_model_name: vertex_ai/claude-3-7-sonnet@20250219
|
||||
editor_edit_format: editor-diff
|
||||
|
||||
- name: vertex_ai/claude-3-opus@20240229
|
||||
edit_format: diff
|
||||
weak_model_name: vertex_ai/claude-3-5-haiku@20241022
|
||||
|
|
|
@@ -77,7 +77,7 @@ cog.outl("```")
## Use claude-3-opus-20240229 model for the main chat
#opus: false

## Use claude-3-5-sonnet-20241022 model for the main chat
## Use anthropic/claude-3-7-sonnet-20250219 model for the main chat
#sonnet: false

## Use claude-3-5-haiku-20241022 model for the main chat

@@ -490,5 +490,8 @@ cog.outl("```")

## Specify which editor to use for the /editor command
#editor: xxx

## Install the tree_sitter_language_pack (experimental)
#install-tree-sitter-language-pack: false
```
<!--[[[end]]]-->

@@ -67,7 +67,7 @@ cog.outl("```")
## Use claude-3-opus-20240229 model for the main chat
#AIDER_OPUS=

## Use claude-3-5-sonnet-20241022 model for the main chat
## Use anthropic/claude-3-7-sonnet-20250219 model for the main chat
#AIDER_SONNET=

## Use claude-3-5-haiku-20241022 model for the main chat

@@ -444,5 +444,8 @@ cog.outl("```")

## Specify which editor to use for the /editor command
#AIDER_EDITOR=

## Install the tree_sitter_language_pack (experimental)
#AIDER_INSTALL_TREE_SITTER_LANGUAGE_PACK=false
```
<!--[[[end]]]-->

@@ -60,7 +60,7 @@ for alias, model in sorted(MODEL_ALIASES.items()):
- `haiku`: claude-3-5-haiku-20241022
- `opus`: claude-3-opus-20240229
- `r1`: deepseek/deepseek-reasoner
- `sonnet`: claude-3-5-sonnet-20241022
- `sonnet`: anthropic/claude-3-7-sonnet-20250219
<!--[[[end]]]-->

## Priority

@@ -79,6 +79,7 @@ usage: aider [-h] [--model] [--opus] [--sonnet] [--haiku] [--4]
[--fancy-input | --no-fancy-input]
[--multiline | --no-multiline]
[--detect-urls | --no-detect-urls] [--editor]
[--install-tree-sitter-language-pack]

```

@@ -101,7 +102,7 @@ Use claude-3-opus-20240229 model for the main chat
Environment variable: `AIDER_OPUS`

### `--sonnet`
Use claude-3-5-sonnet-20241022 model for the main chat
Use anthropic/claude-3-7-sonnet-20250219 model for the main chat
Environment variable: `AIDER_SONNET`

### `--haiku`

@@ -761,4 +762,9 @@ Aliases:
### `--editor VALUE`
Specify which editor to use for the /editor command
Environment variable: `AIDER_EDITOR`

### `--install-tree-sitter-language-pack`
Install the tree_sitter_language_pack (experimental)
Default: False
Environment variable: `AIDER_INSTALL_TREE_SITTER_LANGUAGE_PACK`
<!--[[[end]]]-->

@@ -249,14 +249,16 @@ tr:hover { background-color: #f5f5f5; }
</style>
<table>
<tr><th>Model Name</th><th class='right'>Total Tokens</th><th class='right'>Percent</th></tr>
<tr><td>claude-3-5-sonnet-20241022</td><td class='right'>938,569</td><td class='right'>62.9%</td></tr>
<tr><td>fireworks_ai/accounts/fireworks/models/deepseek-v3</td><td class='right'>273,005</td><td class='right'>18.3%</td></tr>
<tr><td>deepseek/deepseek-chat</td><td class='right'>97,745</td><td class='right'>6.6%</td></tr>
<tr><td>o3-mini</td><td class='right'>75,400</td><td class='right'>5.1%</td></tr>
<tr><td>fireworks_ai/accounts/fireworks/models/deepseek-r1</td><td class='right'>65,251</td><td class='right'>4.4%</td></tr>
<tr><td>claude-3-5-haiku-20241022</td><td class='right'>39,430</td><td class='right'>2.6%</td></tr>
<tr><td>gemini/REDACTED</td><td class='right'>1,859</td><td class='right'>0.1%</td></tr>
<tr><td>ollama_chat/REDACTED</td><td class='right'>309</td><td class='right'>0.0%</td></tr>
<tr><td>claude-3-5-sonnet-20241022</td><td class='right'>444,178</td><td class='right'>45.0%</td></tr>
<tr><td>anthropic/claude-3-7-sonnet-20250219</td><td class='right'>258,397</td><td class='right'>26.2%</td></tr>
<tr><td>fireworks_ai/accounts/fireworks/models/deepseek-v3</td><td class='right'>105,999</td><td class='right'>10.7%</td></tr>
<tr><td>claude-3-5-haiku-20241022</td><td class='right'>69,203</td><td class='right'>7.0%</td></tr>
<tr><td>o3-mini</td><td class='right'>52,192</td><td class='right'>5.3%</td></tr>
<tr><td>openrouter/anthropic/claude-3.7-sonnet</td><td class='right'>20,213</td><td class='right'>2.0%</td></tr>
<tr><td>gpt-4o</td><td class='right'>12,595</td><td class='right'>1.3%</td></tr>
<tr><td>openrouter/REDACTED</td><td class='right'>12,083</td><td class='right'>1.2%</td></tr>
<tr><td>openrouter/openai/o3-mini</td><td class='right'>10,107</td><td class='right'>1.0%</td></tr>
<tr><td>anthropic/REDACTED</td><td class='right'>1,999</td><td class='right'>0.2%</td></tr>
</table>

{: .note :}

@@ -116,6 +116,6 @@ mod_dates = [get_last_modified_date(file) for file in files]
latest_mod_date = max(mod_dates)
cog.out(f"{latest_mod_date.strftime('%B %d, %Y.')}")
]]]-->
January 31, 2025.
February 26, 2025.
<!--[[[end]]]-->
</p>

@@ -5,6 +5,15 @@ nav_order: 800

# Benchmark notes

## Notes on pricing

All pricing information is the cost to run the benchmark at the time it was
run.
Providers change their pricing, and every benchmark run ends up with a slightly
different cost.
Pricing is provided on a *best efforts* basis, and may not always be current
or fully accurate.

## Notes on benchmarking results

The key benchmarking results are:

@@ -39,3 +39,34 @@ with more generous rate limits.
You can use `aider --model <model-name>` to use any other Anthropic model.
For example, if you want to use a specific version of Opus
you could do `aider --model claude-3-opus-20240229`.

## Thinking tokens

Aider can work with Sonnet 3.7's new thinking tokens, but does not ask Sonnet to use
thinking tokens by default.

Enabling thinking currently requires manual configuration.
You need to add the following to your `.aider.model.settings.yml`
[model settings file](/docs/config/adv-model-settings.html#model-settings).
Adjust the `budget_tokens` value to change the target number of thinking tokens.

```yaml
- name: anthropic/claude-3-7-sonnet-20250219
  edit_format: diff
  weak_model_name: anthropic/claude-3-5-haiku-20241022
  use_repo_map: true
  examples_as_sys_msg: true
  use_temperature: false
  extra_params:
    extra_headers:
      anthropic-beta: prompt-caching-2024-07-31,pdfs-2024-09-25,output-128k-2025-02-19
    max_tokens: 64000
    thinking:
      type: enabled
      budget_tokens: 32000 # Adjust this number
  cache_control: true
  editor_model_name: anthropic/claude-3-7-sonnet-20250219
  editor_edit_format: editor-diff
```

More streamlined support will be coming soon.

@@ -11,6 +11,32 @@ You will need to have an AWS account with access to the Bedrock service.
To configure Aider to use the Amazon Bedrock API, you need to set up your AWS credentials.
This can be done using the AWS CLI or by setting environment variables.

## Select a Model from Amazon Bedrock

Before you can use a model through Amazon Bedrock, you must "enable" the model under the **Model
Access** screen in the AWS Management Console.
To find the `Model ID`, open the **Model Catalog** area in the Bedrock console, select the model
you want to use, and then find the `modelId` property under the "Usage" heading.

### Bedrock Inference Profiles

Amazon Bedrock has added support for a new feature called [cross-region "inference profiles."](https://aws.amazon.com/about-aws/whats-new/2024/09/amazon-bedrock-knowledge-bases-cross-region-inference/)
Some models hosted in Bedrock _only_ support these inference profiles.
If you're using one of these models, then you will need to use the `Inference Profile ID`
instead of the `Model ID` from the **Model Catalog** screen, in the AWS Management Console.
For example, the Claude Sonnet 3.7 model, released in February 2025, exclusively supports
inference through inference profiles. To use this model, you would use the
`us.anthropic.claude-3-7-sonnet-20250219-v1:0` Inference Profile ID.
In the Amazon Bedrock console, go to Inference and Assessment ➡️ Cross-region Inference
to find the `Inference Profile ID` value.

If you attempt to use a `Model ID` for a model that exclusively supports the Inference Profile
feature, you will receive an error message like the following:

> litellm.BadRequestError: BedrockException - b'{"message":"Invocation of model ID
anthropic.claude-3-7-sonnet-20250219-v1:0 with on-demand throughput isn\xe2\x80\x99t supported. Retry your
request with the ID or ARN of an inference profile that contains this model."}'

## AWS CLI Configuration

If you haven't already, install the [AWS CLI](https://aws.amazon.com/cli/) and configure it with your credentials:

@@ -39,6 +65,16 @@ export AWS_PROFILE=your-profile
You can add these to your
[.env file](/docs/config/dotenv.html).

### Set Environment Variables with PowerShell

If you're using PowerShell on MacOS, Linux, or Windows, you can set the same AWS configuration environment variables with these commands.

```pwsh
$env:AWS_ACCESS_KEY_ID = 'your_access_key'
$env:AWS_SECRET_ACCESS_KEY = 'your_secret_key'
$env:AWS_REGION = 'us-west-2' # Put whichever AWS region that you'd like, that the Bedrock service supports.
```

## Install boto3

The AWS Bedrock provider requires the `boto3` package in order to function correctly:


@@ -10,16 +10,18 @@ To use LM Studio:
```
python -m pip install -U aider-chat

export LM_STUDIO_API_KEY=<key> # Mac/Linux
setx LM_STUDIO_API_KEY <key> # Windows, restart shell after setx
# Must set a value here even if it's a dummy value
export LM_STUDIO_API_KEY=dummy-api-key # Mac/Linux
setx LM_STUDIO_API_KEY dummy-api-key # Windows, restart shell after setx

export LM_STUDIO_API_BASE=<url> # Mac/Linux
setx LM_STUDIO_API_BASE <url> # Windows, restart shell after setx
# LM Studio default server URL is http://localhost:1234/v1
export LM_STUDIO_API_BASE=http://localhost:1234/v1 # Mac/Linux
setx LM_STUDIO_API_BASE http://localhost:1234/v1 # Windows, restart shell after setx

aider --model lm_studio/<your-model-name>
```


**Note:** Even though LM Studio doesn't require an API Key out of the box, the `LM_STUDIO_API_KEY` must have a dummy value like `dummy-api-key` set or the client request will fail trying to send an empty `Bearer` token.
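
To make that requirement concrete, here is an illustrative sketch (not from the docs) using the OpenAI-compatible Python client against LM Studio's default endpoint; any non-empty key satisfies the `Bearer` header:

```python
from openai import OpenAI

# Illustrative only: LM Studio exposes an OpenAI-compatible server, and the
# client still needs *some* api_key so the Authorization header is not empty.
client = OpenAI(
    base_url="http://localhost:1234/v1",  # LM Studio's default server URL
    api_key="dummy-api-key",  # any non-empty placeholder works
)
# Replace <your-model-name> with the model you have loaded in LM Studio:
# client.chat.completions.create(
#     model="<your-model-name>",
#     messages=[{"role": "user", "content": "Hello"}],
# )
```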

See the [model warnings](warnings.html)
section for information on warnings which will occur

@@ -44,13 +44,13 @@ to allow use of all models.
OpenRouter often has multiple providers serving each model.
You can control which OpenRouter providers are used for your requests in two ways:

1. By "ignoring" certain providers in your
[OpenRouter account settings](https://openrouter.ai/settings/preferences).
This disables those named providers across all the models that you access via OpenRouter.

2. By configuring "provider routing" in a `.aider.model.settings.yml` file.

Place that file in your home directory or the root if your git project, with
Place that file in your home directory or the root of your git project, with
entries like this:

```yaml

@@ -57,11 +57,17 @@ cog.out(model_list)
]]]-->
- anthropic.claude-3-5-haiku-20241022-v1:0
- anthropic.claude-3-5-sonnet-20241022-v2:0
- anthropic.claude-3-7-sonnet-20250219-v1:0
- claude-3-5-haiku-20241022
- claude-3-5-haiku-latest
- claude-3-5-sonnet-20240620
- claude-3-5-sonnet-20241022
- claude-3-5-sonnet-latest
- claude-3-7-sonnet-20250219
- claude-3-7-sonnet-latest
- claude-3-haiku-20240307
- claude-3-opus-20240229
- claude-3-opus-latest
- claude-3-sonnet-20240229
- codestral/codestral-2405
- codestral/codestral-latest

@@ -93,15 +99,18 @@ cog.out(model_list)
- mistral/pixtral-large-2411
- mistral/pixtral-large-latest
- openrouter/anthropic/claude-3.5-sonnet
- openrouter/anthropic/claude-3.7-sonnet
- openrouter/deepseek/deepseek-r1
- us.anthropic.claude-3-5-haiku-20241022-v1:0
- us.anthropic.claude-3-5-sonnet-20241022-v2:0
- us.anthropic.claude-3-7-sonnet-20250219-v1:0
- vertex_ai/claude-3-5-haiku
- vertex_ai/claude-3-5-haiku@20241022
- vertex_ai/claude-3-5-sonnet
- vertex_ai/claude-3-5-sonnet-v2
- vertex_ai/claude-3-5-sonnet-v2@20241022
- vertex_ai/claude-3-5-sonnet@20240620
- vertex_ai/claude-3-7-sonnet@20250219
- vertex_ai/claude-3-haiku
- vertex_ai/claude-3-haiku@20240307
- vertex_ai/claude-3-opus

@@ -342,6 +342,7 @@ def main(
    LONG_TIMEOUT = 24 * 60 * 60
    sendchat.RETRY_TIMEOUT = LONG_TIMEOUT
    base_coder.RETRY_TIMEOUT = LONG_TIMEOUT
    models.RETRY_TIMEOUT = LONG_TIMEOUT

    if threads == 1:
        all_results = []

@@ -15,7 +15,7 @@ HARD_SET_NUM = 3  # Number of models that defines the hard set threshold

def get_dirs_from_leaderboard():
    # Load the leaderboard data
    with open("aider/website/_data/edit_leaderboard.yml") as f:
    with open("aider/website/_data/polyglot_leaderboard.yml") as f:
        leaderboard = yaml.safe_load(f)
    return [(entry["dirname"], entry["model"]) for entry in leaderboard]

@@ -92,7 +92,7 @@ def analyze_exercise_solutions(dirs=None, topn=None, copy_hard_set=False):
            (
                entry["pass_rate_2"]
                for entry in yaml.safe_load(
                    open("aider/website/_data/edit_leaderboard.yml")
                    open("aider/website/_data/polyglot_leaderboard.yml")
                )
                if entry["dirname"] == dirname
            ),

@@ -6,7 +6,7 @@
#
aiohappyeyeballs==2.4.6
# via aiohttp
aiohttp==3.11.12
aiohttp==3.11.13
# via litellm
aiosignal==1.3.2
# via aiohttp

@@ -48,10 +48,12 @@ diff-match-patch==20241021
diskcache==5.6.3
# via -r requirements/requirements.in
distro==1.9.0
# via openai
# via
# openai
# posthog
filelock==3.17.0
# via huggingface-hub
flake8==7.1.1
flake8==7.1.2
# via -r requirements/requirements.in
frozenlist==1.5.0
# via

@@ -63,7 +65,7 @@ gitdb==4.0.12
# via gitpython
gitpython==3.1.44
# via -r requirements/requirements.in
grep-ast==0.5.0
grep-ast==0.6.1
# via -r requirements/requirements.in
h11==0.14.0
# via httpcore

@@ -73,7 +75,7 @@ httpx==0.28.1
# via
# litellm
# openai
huggingface-hub==0.28.1
huggingface-hub==0.29.1
# via tokenizers
idna==3.10
# via

@@ -99,7 +101,7 @@ jsonschema==4.23.0
# litellm
jsonschema-specifications==2024.10.1
# via jsonschema
litellm==1.60.6
litellm==1.61.16
# via -r requirements/requirements.in
markdown-it-py==3.0.0
# via rich

@@ -124,7 +126,7 @@ numpy==1.26.4
# -r requirements/requirements.in
# scipy
# soundfile
openai==1.61.1
openai==1.64.0
# via litellm
packaging==24.2
# via

@@ -138,15 +140,15 @@ pexpect==4.9.0
# via -r requirements/requirements.in
pillow==10.4.0
# via -r requirements/requirements.in
posthog==3.11.0
posthog==3.16.0
# via -r requirements/requirements.in
prompt-toolkit==3.0.50
# via -r requirements/requirements.in
propcache==0.2.1
propcache==0.3.0
# via
# aiohttp
# yarl
psutil==6.1.1
psutil==7.0.0
# via -r requirements/requirements.in
ptyprocess==0.7.0
# via pexpect

@@ -192,7 +194,7 @@ requests==2.32.3
# tiktoken
rich==13.9.4
# via -r requirements/requirements.in
rpds-py==0.22.3
rpds-py==0.23.1
# via
# jsonschema
# referencing

@@ -209,13 +211,15 @@ sniffio==1.3.1
# via
# anyio
# openai
socksio==1.0.0
# via -r requirements/requirements.in
sounddevice==0.5.1
# via -r requirements/requirements.in
soundfile==0.13.1
# via -r requirements/requirements.in
soupsieve==2.6
# via beautifulsoup4
tiktoken==0.8.0
tiktoken==0.9.0
# via litellm
tokenizers==0.19.1
# via

@@ -228,6 +232,7 @@ tqdm==4.67.1
tree-sitter==0.21.3
# via
# -r requirements/requirements.in
# grep-ast
# tree-sitter-languages
tree-sitter-languages==1.10.2
# via grep-ast

@@ -254,5 +259,5 @@ zipp==3.21.0
# via importlib-metadata

# The following packages are considered to be unsafe in a requirements file:
pip==25.0
pip==25.0.1
# via -r requirements/requirements.in

@@ -15,7 +15,7 @@ attrs==25.1.0
# referencing
blinker==1.9.0
# via streamlit
cachetools==5.5.1
cachetools==5.5.2
# via streamlit
certifi==2025.1.31
# via

@@ -92,7 +92,7 @@ mdurl==0.1.2
# -c requirements.txt
# -c requirements/requirements-dev.txt
# markdown-it-py
narwhals==1.25.2
narwhals==1.28.0
# via altair
numpy==1.26.4
# via

@@ -124,7 +124,7 @@ pillow==10.4.0
# streamlit
protobuf==5.29.3
# via streamlit
pyarrow==19.0.0
pyarrow==19.0.1
# via streamlit
pydeck==0.9.1
# via streamlit

@@ -163,7 +163,7 @@ rich==13.9.4
# -c requirements.txt
# -c requirements/requirements-dev.txt
# streamlit
rpds-py==0.22.3
rpds-py==0.23.1
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt

@@ -180,7 +180,7 @@ smmap==5.0.2
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# gitdb
streamlit==1.42.0
streamlit==1.42.2
# via -r requirements/requirements-browser.in
tenacity==9.0.0
# via

@@ -53,7 +53,7 @@ filelock==3.17.0
# virtualenv
fonttools==4.56.0
# via matplotlib
identify==2.6.6
identify==2.6.8
# via pre-commit
idna==3.10
# via

@@ -173,6 +173,8 @@ rich==13.9.4
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# typer
roman-numerals-py==3.1.0
# via sphinx
semver==3.0.4
# via -r requirements/requirements-dev.in
shellingham==1.5.4

@@ -184,7 +186,7 @@ six==1.17.0
# python-dateutil
snowballstemmer==2.2.0
# via sphinx
sphinx==8.1.3
sphinx==8.2.1
# via
# sphinx-rtd-theme
# sphinxcontrib-jquery

@@ -218,16 +220,16 @@ urllib3==2.3.0
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# requests
virtualenv==20.29.1
virtualenv==20.29.2
# via pre-commit
wheel==0.45.1
# via pip-tools

# The following packages are considered to be unsafe in a requirements file:
pip==25.0
pip==25.0.1
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# pip-tools
setuptools==75.8.0
setuptools==75.8.1
# via pip-tools

@@ -9,7 +9,7 @@ aiohappyeyeballs==2.4.6
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# aiohttp
aiohttp==3.11.12
aiohttp==3.11.13
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt

@@ -103,7 +103,7 @@ httpx==0.28.1
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt
# llama-index-core
huggingface-hub[inference]==0.28.1
huggingface-hub[inference]==0.29.1
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt

@@ -130,11 +130,11 @@ joblib==1.4.2
# via
# nltk
# scikit-learn
llama-index-core==0.12.16.post1
llama-index-core==0.12.20
# via
# -r requirements/requirements-help.in
# llama-index-embeddings-huggingface
llama-index-embeddings-huggingface==0.5.1
llama-index-embeddings-huggingface==0.5.2
# via -r requirements/requirements-help.in
markupsafe==3.0.2
# via

@@ -188,7 +188,7 @@ pillow==10.4.0
# -c requirements/requirements-dev.txt
# llama-index-core
# sentence-transformers
propcache==0.2.1
propcache==0.3.0
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt

@@ -228,7 +228,7 @@ requests==2.32.3
# llama-index-core
# tiktoken
# transformers
safetensors==0.5.2
safetensors==0.5.3
# via transformers
scikit-learn==1.6.1
# via sentence-transformers

@@ -255,7 +255,7 @@ tenacity==9.0.0
# via llama-index-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.8.0
tiktoken==0.9.0
# via
# -c /Users/gauthier/Projects/aider/requirements.txt
# -c requirements.txt

@@ -29,6 +29,7 @@ pexpect
json5
psutil
watchfiles
socksio
pip

# The proper dependency is networkx[default], but this brings

@@ -38,6 +38,7 @@ def blame(start_tag, end_tag=None):
        for f in files
        if f.endswith((".js", ".py", ".scm", ".sh", "Dockerfile", "Gemfile"))
        or (f.startswith(".github/workflows/") and f.endswith(".yml"))
        or (f.startswith("aider/resources/") and f.endswith(".yml"))
        or f in website_files
        or f in test_files
    ]

@@ -126,6 +126,11 @@ def find_oldest_issue(subject, all_issues):


def comment_and_close_duplicate(issue, oldest_issue):
    # Skip if issue is labeled as priority
    if "priority" in [label["name"] for label in issue["labels"]]:
        print(f" - Skipping priority issue #{issue['number']}")
        return

    comment_url = (
        f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue['number']}/comments"
    )

@@ -168,7 +173,11 @@ def find_unlabeled_with_paul_comments(issues):

def handle_unlabeled_issues(all_issues, auto_yes):
    print("\nFinding unlabeled issues with paul-gauthier comments...")
    unlabeled_issues = find_unlabeled_with_paul_comments(all_issues)
    unlabeled_issues = [
        issue
        for issue in find_unlabeled_with_paul_comments(all_issues)
        if "priority" not in [label["name"] for label in issue["labels"]]
    ]

    if not unlabeled_issues:
        print("No unlabeled issues with paul-gauthier comments found.")

@@ -197,10 +206,12 @@ def handle_stale_issues(all_issues, auto_yes):

    for issue in all_issues:
        # Skip if not open, not a question, already stale, or has been reopened
        labels = [label["name"] for label in issue["labels"]]
        if (
            issue["state"] != "open"
            or "question" not in [label["name"] for label in issue["labels"]]
            or "stale" in [label["name"] for label in issue["labels"]]
            or "question" not in labels
            or "stale" in labels
            or "priority" in labels
            or has_been_reopened(issue["number"])
        ):
            continue

@@ -239,8 +250,9 @@ def handle_stale_closing(all_issues, auto_yes):
    print("\nChecking for issues to close or unstale...")

    for issue in all_issues:
        # Skip if not open or not stale
        if issue["state"] != "open" or "stale" not in [label["name"] for label in issue["labels"]]:
        # Skip if not open, not stale, or is priority
        labels = [label["name"] for label in issue["labels"]]
        if issue["state"] != "open" or "stale" not in labels or "priority" in labels:
            continue

        # Get the timeline to find when the stale label was last added

@@ -324,9 +336,9 @@ def handle_fixed_issues(all_issues, auto_yes):
    print("\nChecking for fixed enhancement and bug issues to close...")

    for issue in all_issues:
        # Skip if not open or doesn't have fixed label
        # Skip if not open, doesn't have fixed label, or is priority
        labels = [label["name"] for label in issue["labels"]]
        if issue["state"] != "open" or "fixed" not in labels:
        if issue["state"] != "open" or "fixed" not in labels or "priority" in labels:
            continue

        # Check if it's an enhancement or bug

@@ -1,8 +1,6 @@
import os
from unittest.mock import MagicMock, patch

import pytest

from aider.editor import (
    DEFAULT_EDITOR_NIX,
    DEFAULT_EDITOR_OS_X,

@@ -21,7 +19,7 @@ def test_get_environment_editor():
    assert get_environment_editor("default") == "default"

    # Test EDITOR precedence
    with patch.dict(os.environ, {"EDITOR": "vim"}):
    with patch.dict(os.environ, {"EDITOR": "vim"}, clear=True):
        assert get_environment_editor() == "vim"

    # Test VISUAL overrides EDITOR

@@ -34,17 +32,17 @@ def test_discover_editor_defaults():
        # Test Windows default
        mock_system.return_value = "Windows"
        with patch.dict(os.environ, {}, clear=True):
            assert discover_editor() == [DEFAULT_EDITOR_WINDOWS]
            assert discover_editor() == DEFAULT_EDITOR_WINDOWS

        # Test macOS default
        mock_system.return_value = "Darwin"
        with patch.dict(os.environ, {}, clear=True):
            assert discover_editor() == [DEFAULT_EDITOR_OS_X]
            assert discover_editor() == DEFAULT_EDITOR_OS_X

        # Test Linux default
        mock_system.return_value = "Linux"
        with patch.dict(os.environ, {}, clear=True):
            assert discover_editor() == [DEFAULT_EDITOR_NIX]
            assert discover_editor() == DEFAULT_EDITOR_NIX


def test_write_temp_file():

@@ -81,12 +79,44 @@ def test_print_status_message(capsys):

def test_discover_editor_override():
    # Test editor override
    assert discover_editor("code") == ["code"]
    assert discover_editor('vim -c "set noswapfile"') == ["vim", "-c", "set noswapfile"]
    assert discover_editor("code") == "code"
    assert discover_editor('vim -c "set noswapfile"') == 'vim -c "set noswapfile"'

    # Test invalid editor command
    with pytest.raises(RuntimeError):
        discover_editor('vim "unclosed quote')

def test_pipe_editor_with_fake_editor():
    # Create a temporary Python script that logs its arguments
    import sys
    import tempfile

    with tempfile.NamedTemporaryFile(mode="w", suffix=".log", delete=False) as log_f:
        log_path = log_f.name
        # Convert to raw string path to avoid escape issues on Windows
        log_path_escaped = log_path.replace("\\", "\\\\")

    with tempfile.NamedTemporaryFile(mode="w", suffix=".py", delete=False) as f:
        f.write(f"""import sys
with open(r"{log_path_escaped}", "w") as f:
    f.write(" ".join(sys.argv))
""")
        script_path = f.name

    try:
        # Use the Python script as editor and verify it's called with .md file
        python_exe = sys.executable
        editor_cmd = f"{python_exe} {script_path}"
        pipe_editor("test content", suffix="md", editor=editor_cmd)

        # Read the log file to see what arguments were passed
        with open(log_path) as f:
            called_args = f.read().strip()

        # Verify the editor was called with a .md file
        assert called_args.endswith(".md"), f"Called args: {called_args!r}"

    finally:
        # Clean up
        os.unlink(script_path)
        os.unlink(log_path)


def test_pipe_editor():

@@ -123,7 +123,7 @@ class TestModels(unittest.TestCase):
        self.assertEqual(model.name, "gpt-3.5-turbo")

        model = Model("sonnet")
        self.assertEqual(model.name, "claude-3-5-sonnet-20241022")
        self.assertEqual(model.name, "anthropic/claude-3-7-sonnet-20250219")

        model = Model("haiku")
        self.assertEqual(model.name, "claude-3-5-haiku-20241022")