Paul Gauthier 2024-08-20 17:06:14 -07:00
parent bbfbdb8adb
commit 60f3dc055c
5 changed files with 20 additions and 2 deletions

@@ -98,6 +98,9 @@
 ## Enable caching of prompts (default: False)
 #cache-prompts: false
+## Multiplier for map tokens when no files are specified (default: 2)
+#map-multiplier-no-files: true
 ## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
 #max-chat-history-tokens:

@@ -102,6 +102,9 @@
 ## Enable caching of prompts (default: False)
 #AIDER_CACHE_PROMPTS=false
+## Multiplier for map tokens when no files are specified (default: 2)
+#AIDER_MAP_MULTIPLIER_NO_FILES=true
 ## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
 #AIDER_MAX_CHAT_HISTORY_TOKENS=

@@ -137,6 +137,9 @@ cog.outl("```")
 ## Enable caching of prompts (default: False)
 #cache-prompts: false
+## Multiplier for map tokens when no files are specified (default: 2)
+#map-multiplier-no-files: true
 ## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
 #max-chat-history-tokens:

@@ -144,6 +144,9 @@ cog.outl("```")
 ## Enable caching of prompts (default: False)
 #AIDER_CACHE_PROMPTS=false
+## Multiplier for map tokens when no files are specified (default: 2)
+#AIDER_MAP_MULTIPLIER_NO_FILES=true
 ## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
 #AIDER_MAX_CHAT_HISTORY_TOKENS=

@@ -36,8 +36,9 @@ usage: aider [-h] [--openai-api-key] [--anthropic-api-key] [--model]
 [--show-model-warnings | --no-show-model-warnings]
 [--map-tokens] [--map-refresh]
 [--cache-prompts | --no-cache-prompts]
-[--max-chat-history-tokens] [--env-file]
-[--input-history-file] [--chat-history-file]
+[--map-multiplier-no-files] [--max-chat-history-tokens]
+[--env-file] [--input-history-file]
+[--chat-history-file]
 [--restore-chat-history | --no-restore-chat-history]
 [--llm-history-file] [--dark-mode] [--light-mode]
 [--pretty | --no-pretty] [--stream | --no-stream]
@@ -204,6 +205,11 @@ Aliases:
 - `--cache-prompts`
 - `--no-cache-prompts`
+### `--map-multiplier-no-files VALUE`
+Multiplier for map tokens when no files are specified (default: 2)
+Default: 2
+Environment variable: `AIDER_MAP_MULTIPLIER_NO_FILES`
 ### `--max-chat-history-tokens VALUE`
 Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
 Environment variable: `AIDER_MAX_CHAT_HISTORY_TOKENS`
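
For reference, a minimal sketch of how the options documented above could be used once this change lands, either as command-line flags or via their environment variables; the values 4 and 2048 are purely illustrative examples, not recommended settings:

```shell
# Raise the repo-map token multiplier used when no files are in the chat
# (4 is an arbitrary example; the default shown above is 2)
aider --map-multiplier-no-files 4

# The same setting via its environment variable
export AIDER_MAP_MULTIPLIER_NO_FILES=4
aider

# Cap chat history at 2048 tokens instead of the model's default
aider --max-chat-history-tokens 2048
```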