refactor: Increase max chat history tokens limit from 4k to 8k

This commit is contained in:
Paul Gauthier 2025-01-04 06:19:38 -08:00 committed by Paul Gauthier (aider)
parent d6b612a4a3
commit 463fdb1ed9

View file

@@ -921,8 +921,8 @@ class Model(ModelSettings):
     max_input_tokens = self.info.get("max_input_tokens") or 0
     # Calculate max_chat_history_tokens as 1/16th of max_input_tokens,
-    # with minimum 1k and maximum 4k
-    self.max_chat_history_tokens = min(max(max_input_tokens // 16, 1024), 4096)
+    # with minimum 1k and maximum 8k
+    self.max_chat_history_tokens = min(max(max_input_tokens / 16, 1024), 8192)
self.configure_model_settings(model)
if weak_model is False: