Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-25 06:44:59 +00:00
refactor: adjust max_chat_history_tokens calculation based on max_input_tokens
parent 729354b038
commit ff41f9bd9a
1 changed file with 6 additions and 3 deletions
@@ -920,10 +920,13 @@ class Model(ModelSettings):
         self.keys_in_environment = res.get("keys_in_environment")
 
         max_input_tokens = self.info.get("max_input_tokens") or 0
-        if max_input_tokens < 32 * 1024:
-            # generalize this with division. ai!
-            self.max_chat_history_tokens = 1024
-        else:
-            self.max_chat_history_tokens = 2 * 1024
+        if max_input_tokens < 16 * 1024:
+            self.max_chat_history_tokens = 1 * 1024
+        elif max_input_tokens < 32 * 1024:
+            self.max_chat_history_tokens = 2 * 1024
+        else:
+            self.max_chat_history_tokens = 4 * 1024
 
         self.configure_model_settings(model)
         if weak_model is False:
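The deleted comment asks for these thresholds to be generalized with division. A minimal sketch of what that could look like, as a standalone helper rather than code from this commit; the function name, the 1/16 ratio, and the 1k/8k clamp are illustrative assumptions:

def chat_history_budget(max_input_tokens: int) -> int:
    # Hypothetical division-based version of the tiers in the diff above:
    # scale the chat-history budget with the context window instead of
    # hard-coded cutoffs. 16k/16 = 1k, 32k/16 = 2k, 64k/16 = 4k, so it
    # agrees with the tiered values at those sizes.
    if not max_input_tokens:
        return 1024  # unknown context window: fall back to the 1k floor
    return int(min(max(max_input_tokens / 16, 1024), 8192))

Under this sketch, chat_history_budget(200_000) returns 8192, whereas the tiered code in the diff would settle on 4096 for the same model.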