feat: Remove max_tokens from deepseek model settings

Paul Gauthier 2025-04-04 15:25:36 +13:00 committed by Paul Gauthier (aider)
parent 425284ac62
commit f8801d811b


@@ -589,8 +589,6 @@
 weak_model_name: openrouter/deepseek/deepseek-chat-v3-0324:free
 use_repo_map: true
 examples_as_sys_msg: true
-extra_params:
-  max_tokens: 131072
 caches_by_default: true
 use_temperature: false
 editor_model_name: openrouter/deepseek/deepseek-chat-v3-0324:free
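With max_tokens no longer pinned in the built-in settings, a user who still wants an explicit cap could restore it locally. The sketch below is an assumption, not part of this commit: it uses aider's per-model override file .aider.model.settings.yml, and the model name is inferred from the weak_model_name/editor_model_name lines in the diff context rather than stated in the hunk itself.

# Hypothetical entry in .aider.model.settings.yml (user override, assumed model name)
- name: openrouter/deepseek/deepseek-chat-v3-0324:free
  extra_params:
    max_tokens: 131072   # the cap this commit removes from the built-in defaults; adjust as needed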