feat: Add free DeepSeek chat model configuration to model metadata

Paul Gauthier (aider) 2025-03-31 09:08:51 +13:00
parent 19e1201c8a
commit a9c9877580


@@ -79,6 +79,17 @@
         //"supports_tool_choice": true,
         "supports_prompt_caching": true
     },
+    "openrouter/deepseek/deepseek-chat-v3-0324:free": {
+        "max_tokens": 131072,
+        "max_input_tokens": 131072,
+        "max_output_tokens": 131072,
+        "input_cost_per_token": 0,
+        "output_cost_per_token": 0,
+        "litellm_provider": "openrouter",
+        "supports_prompt_caching": true,
+        "mode": "chat",
+        "supports_tool_choice": true
+    },
     "fireworks_ai/accounts/fireworks/models/deepseek-r1": {
         "max_tokens": 160000,
         "max_input_tokens": 128000,