Mirror of https://github.com/Aider-AI/aider.git, synced 2025-06-06 20:54:59 +00:00

doc: fix wrong encapsulation

This commit is contained in:
parent f3ff24e35a
commit 3bc056a107

3 changed files with 4 additions and 6 deletions
@@ -237,10 +237,8 @@ def get_parser(default_config_files, git_root):
         type=int,
         default=None,
         help=(
-            (
-                "Soft limit on tokens for chat history, after which summarization begins."
-                " If unspecified, defaults to the model's max_chat_history_tokens."
-            ),
+            "Soft limit on tokens for chat history, after which summarization begins."
+            " If unspecified, defaults to the model's max_chat_history_tokens."
         ),
     )
     # This is a duplicate of the argument in the preparser and is a no-op by this time of
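The change above is pure Python syntax. In the old code, the extra pair of parentheses followed by a trailing comma turned the help value into a one-element tuple, instead of the single string that implicit concatenation of adjacent string literals produces. A minimal standalone sketch (not aider's code) of the difference:

# Minimal standalone sketch (not aider's code): the extra parentheses plus the
# trailing comma build a one-element tuple, while the fixed version is a single
# str produced by implicit concatenation of adjacent string literals.
broken_help = (
    (
        "Soft limit on tokens for chat history, after which summarization begins."
        " If unspecified, defaults to the model's max_chat_history_tokens."
    ),  # <- this comma makes the whole value a tuple
)

fixed_help = (
    "Soft limit on tokens for chat history, after which summarization begins."
    " If unspecified, defaults to the model's max_chat_history_tokens."
)

print(type(broken_help))  # <class 'tuple'>
print(type(fixed_help))   # <class 'str'>

# Stringifying the tuple yields its repr, quotes and trailing comma included:
print(broken_help)
# ("Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.",)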
@@ -108,7 +108,7 @@
 ## Only work with models that have meta-data available (default: True)
 #AIDER_SHOW_MODEL_WARNINGS=true

-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## ("Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.",)
 #AIDER_MAX_CHAT_HISTORY_TOKENS=

 ## Specify the .env file to load (default: .env in git root)
@@ -150,7 +150,7 @@ cog.outl("```")
 ## Only work with models that have meta-data available (default: True)
 #AIDER_SHOW_MODEL_WARNINGS=true

-## Maximum number of tokens to use for chat history. If not specified, uses the model's max_chat_history_tokens.
+## ("Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.",)
 #AIDER_MAX_CHAT_HISTORY_TOKENS=

 ## Specify the .env file to load (default: .env in git root)
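Both documentation hunks show the tuple's repr inside the generated "## ..." comment line, which is what a doc generator produces when it stringifies a tuple-valued help. The sketch below is hypothetical (aider's real formatter lives elsewhere in the repo and may differ); the emit_dotenv_doc helper and the AIDER_ prefix rule are assumptions used only to illustrate how a dotenv-style generator of this shape would emit these lines:

import argparse

# Hypothetical dotenv-style doc generator (not aider's actual formatter):
# it writes "## <help>" followed by a commented-out AIDER_* variable, the
# shape of the lines in these hunks. Because it stringifies action.help,
# a tuple-valued help is rendered as its repr.
def emit_dotenv_doc(parser: argparse.ArgumentParser) -> str:
    lines = []
    for action in parser._actions:  # _actions is private; acceptable for a sketch
        if not action.option_strings or action.help is None:
            continue
        env_name = "AIDER_" + action.option_strings[-1].lstrip("-").replace("-", "_").upper()
        lines.append(f"## {action.help}")
        lines.append(f"#{env_name}=")
        lines.append("")
    return "\n".join(lines)

# Usage with the fixed (plain string) help text:
parser = argparse.ArgumentParser(add_help=False)
parser.add_argument(
    "--max-chat-history-tokens",
    type=int,
    default=None,
    help=(
        "Soft limit on tokens for chat history, after which summarization begins."
        " If unspecified, defaults to the model's max_chat_history_tokens."
    ),
)
print(emit_dotenv_doc(parser))
# ## Soft limit on tokens for chat history, after which summarization begins. If unspecified, defaults to the model's max_chat_history_tokens.
# #AIDER_MAX_CHAT_HISTORY_TOKENS=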