Merge branch 'main' of github.com:Aider-AI/aider

This commit is contained in:
Paul Gauthier 2024-10-10 15:24:04 -07:00
commit 2f307e46e2
9 changed files with 36 additions and 15 deletions

View file

@@ -237,8 +237,8 @@ def get_parser(default_config_files, git_root):
type=int,
default=None,
help=(
"Maximum number of tokens to use for chat history. If not specified, uses the model's"
" max_chat_history_tokens."
"Soft limit on tokens for chat history, after which summarization begins."
" If unspecified, defaults to the model's max_chat_history_tokens."
),
)
# This is a duplicate of the argument in the preparser and is a no-op by this time of
@@ -719,7 +719,6 @@ def get_md_help():
parser.formatter_class = MarkdownHelpFormatter
return argparse.ArgumentParser.format_help(parser)
return parser.format_help()
def get_sample_yaml():
@@ -733,7 +732,6 @@ def get_sample_yaml():
parser.formatter_class = YamlHelpFormatter
return argparse.ArgumentParser.format_help(parser)
return parser.format_help()
def get_sample_dotenv():
@@ -747,7 +745,6 @@ def get_sample_dotenv():
parser.formatter_class = DotEnvFormatter
return argparse.ArgumentParser.format_help(parser)
return parser.format_help()
def main():

View file

@@ -319,7 +319,7 @@ MODEL_SETTINGS = [
ModelSettings(
"openrouter/anthropic/claude-3.5-sonnet",
"diff",
weak_model_name="openrouter/anthropic/claude-3-haiku-20240307",
weak_model_name="openrouter/anthropic/claude-3-haiku",
editor_model_name="openrouter/anthropic/claude-3.5-sonnet",
editor_edit_format="editor-diff",
use_repo_map=True,
@@ -334,7 +334,7 @@ MODEL_SETTINGS = [
ModelSettings(
"openrouter/anthropic/claude-3.5-sonnet:beta",
"diff",
weak_model_name="openrouter/anthropic/claude-3-haiku-20240307",
weak_model_name="openrouter/anthropic/claude-3-haiku:beta",
editor_model_name="openrouter/anthropic/claude-3.5-sonnet:beta",
editor_edit_format="editor-diff",
use_repo_map=True,
@@ -513,6 +513,18 @@ MODEL_SETTINGS = [
use_temperature=False,
streaming=False,
),
ModelSettings(
"azure/o1-mini",
"whole",
weak_model_name="azure/gpt-4o-mini",
editor_model_name="azure/gpt-4o",
editor_edit_format="editor-diff",
use_repo_map=True,
reminder="user",
use_system_prompt=False,
use_temperature=False,
streaming=False,
),
ModelSettings(
"o1-mini",
"whole",
@@ -537,6 +549,18 @@ MODEL_SETTINGS = [
use_temperature=False,
streaming=False,
),
ModelSettings(
"azure/o1-preview",
"diff",
weak_model_name="azure/gpt-4o-mini",
editor_model_name="azure/gpt-4o",
editor_edit_format="editor-diff",
use_repo_map=True,
reminder="user",
use_system_prompt=False,
use_temperature=False,
streaming=False,
),
ModelSettings(
"o1-preview",
"architect",

View file

@@ -2,4 +2,4 @@ You can send long, multi-line messages in the chat in a few ways:
- Paste a multi-line message directly into the chat.
- Enter `{` alone on the first line to start a multiline message and `}` alone on the last line to end it.
- Use Meta-ENTER to start a new line without sending the message (Esc+ENTER in some environments).
- Use `/clipboard` to paste text from the clipboard into the chat.
- Use `/paste` to paste text from the clipboard into the chat.

View file

@@ -104,7 +104,7 @@
## Only work with models that have meta-data available (default: True)
#show-model-warnings: true
## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
#max-chat-history-tokens: xxx
## Specify the .env file to load (default: .env in git root)

View file

@@ -108,7 +108,7 @@
## Only work with models that have meta-data available (default: True)
#AIDER_SHOW_MODEL_WARNINGS=true
## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
#AIDER_MAX_CHAT_HISTORY_TOKENS=
## Specify the .env file to load (default: .env in git root)

View file

@@ -550,7 +550,7 @@ cog.out("```\n")
use_repo_map: true
use_system_prompt: true
use_temperature: true
weak_model_name: openrouter/anthropic/claude-3-haiku-20240307
weak_model_name: openrouter/anthropic/claude-3-haiku
- accepts_images: true
cache_control: true
caches_by_default: false
@@ -568,7 +568,7 @@ cog.out("```\n")
use_repo_map: true
use_system_prompt: true
use_temperature: true
weak_model_name: openrouter/anthropic/claude-3-haiku-20240307
weak_model_name: openrouter/anthropic/claude-3-haiku:beta
- accepts_images: true
cache_control: false
caches_by_default: false

View file

@@ -160,7 +160,7 @@ cog.outl("```")
## Only work with models that have meta-data available (default: True)
#show-model-warnings: true
## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
#max-chat-history-tokens: xxx
## Specify the .env file to load (default: .env in git root)

View file

@@ -150,7 +150,7 @@ cog.outl("```")
## Only work with models that have meta-data available (default: True)
#AIDER_SHOW_MODEL_WARNINGS=true
## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
#AIDER_MAX_CHAT_HISTORY_TOKENS=
## Specify the .env file to load (default: .env in git root)

View file

@@ -223,7 +223,7 @@ Aliases:
- `--no-show-model-warnings`
### `--max-chat-history-tokens VALUE`
Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
Environment variable: `AIDER_MAX_CHAT_HISTORY_TOKENS`
### `--env-file ENV_FILE`