refactor: adjust max_chat_history_tokens calculation based on max_input_tokens

This commit is contained in:
Paul Gauthier 2025-01-04 06:18:12 -08:00 committed by Paul Gauthier (aider)
parent 729354b038
commit ff41f9bd9a

View file

@@ -920,10 +920,13 @@ class Model(ModelSettings):
         self.keys_in_environment = res.get("keys_in_environment")

         max_input_tokens = self.info.get("max_input_tokens") or 0
-        if max_input_tokens < 32 * 1024:  # generalize this with division. ai!
-            self.max_chat_history_tokens = 1024
-        else:
+        if max_input_tokens < 16 * 1024:
+            self.max_chat_history_tokens = 1 * 1024
+        elif max_input_tokens < 32 * 1024:
             self.max_chat_history_tokens = 2 * 1024
+        else:
+            self.max_chat_history_tokens = 4 * 1024

         self.configure_model_settings(model)
         if weak_model is False: