Mirror of https://github.com/Aider-AI/aider.git, synced 2025-06-01 02:05:00 +00:00
cleanup metadata that is now in litellm

commit 51e0fff822
parent 13b3e75d0e

1 changed file with 0 additions and 202 deletions
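Not part of this commit, but for context: the deleted entries duplicate metadata that litellm now bundles in its own model map, so aider can rely on litellm at runtime instead of carrying local overrides. A minimal sketch of that lookup is below; the model names are taken from the removed entries, and a litellm version that already ships them is assumed.

# Minimal sketch (illustrative, not part of this commit): read the same
# metadata fields from litellm's bundled model map instead of a local JSON.
# Assumes a litellm release that already includes these models.
import litellm

# Model names taken from entries removed in this diff.
for model in ("o3-mini", "claude-3-7-sonnet-20250219", "xai/grok-3-beta"):
    info = litellm.get_model_info(model)  # raises if litellm has no mapping
    print(
        model,
        info.get("max_input_tokens"),
        info.get("input_cost_per_token"),
        info.get("output_cost_per_token"),
    )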
@@ -15,22 +15,6 @@
         //"supports_tool_choice": true,
         "supports_prompt_caching": true
     },
-    "openrouter/deepseek/deepseek-r1": {
-        "max_tokens": 8192,
-        "max_input_tokens": 64000,
-        "max_output_tokens": 8192,
-        "input_cost_per_token": 0.00000055,
-        "input_cost_per_token_cache_hit": 0.00000014,
-        "cache_read_input_token_cost": 0.00000014,
-        "cache_creation_input_token_cost": 0.0,
-        "output_cost_per_token": 0.00000219,
-        "litellm_provider": "openrouter",
-        "mode": "chat",
-        //"supports_function_calling": true,
-        "supports_assistant_prefill": true,
-        //"supports_tool_choice": true,
-        "supports_prompt_caching": true
-    },
     "openrouter/deepseek/deepseek-r1:free": {
         "max_tokens": 8192,
         "max_input_tokens": 64000,
@@ -99,15 +83,6 @@
         "output_cost_per_token": 0.000008,
         "mode": "chat",
     },
-    "fireworks_ai/accounts/fireworks/models/deepseek-v3": {
-        "max_tokens": 128000,
-        "max_input_tokens": 100000,
-        "max_output_tokens": 8192,
-        "litellm_provider": "fireworks_ai",
-        "input_cost_per_token": 0.0000009,
-        "output_cost_per_token": 0.0000009,
-        "mode": "chat",
-    },
     "fireworks_ai/accounts/fireworks/models/deepseek-v3-0324": {
         "max_tokens": 160000,
         "max_input_tokens": 100000,
@@ -117,54 +92,6 @@
         "output_cost_per_token": 0.0000009,
         "mode": "chat",
     },
-    "o3-mini": {
-        "max_tokens": 100000,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 100000,
-        "input_cost_per_token": 0.0000011,
-        "output_cost_per_token": 0.0000044,
-        "cache_read_input_token_cost": 0.00000055,
-        "litellm_provider": "openai",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_parallel_function_calling": true,
-        "supports_vision": true,
-        "supports_prompt_caching": true,
-        "supports_system_messages": true,
-        "supports_response_schema": true
-    },
-    "openrouter/openai/o3-mini": {
-        "max_tokens": 100000,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 100000,
-        "input_cost_per_token": 0.0000011,
-        "output_cost_per_token": 0.0000044,
-        "cache_read_input_token_cost": 0.00000055,
-        "litellm_provider": "openrouter",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_parallel_function_calling": true,
-        "supports_vision": true,
-        "supports_prompt_caching": true,
-        "supports_system_messages": true,
-        "supports_response_schema": true
-    },
-    "openrouter/openai/o3-mini-high": {
-        "max_tokens": 100000,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 100000,
-        "input_cost_per_token": 0.0000011,
-        "output_cost_per_token": 0.0000044,
-        "cache_read_input_token_cost": 0.00000055,
-        "litellm_provider": "openrouter",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_parallel_function_calling": true,
-        "supports_vision": true,
-        "supports_prompt_caching": true,
-        "supports_system_messages": true,
-        "supports_response_schema": true
-    },
     "openrouter/openrouter/quasar-alpha": {
         "max_input_tokens": 1000000,
         "max_output_tokens": 32000,
@@ -203,26 +130,6 @@
         "supports_prompt_caching": true,
         "supports_system_messages": true
     },
-    "claude-3-7-sonnet-20250219": {
-        "max_tokens": 8192,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 8192,
-        "input_cost_per_token": 0.000003,
-        "output_cost_per_token": 0.000015,
-        "cache_creation_input_token_cost": 0.00000375,
-        "cache_read_input_token_cost": 0.0000003,
-        "litellm_provider": "anthropic",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_vision": true,
-        "tool_use_system_prompt_tokens": 159,
-        "supports_assistant_prefill": true,
-        "supports_pdf_input": true,
-        "supports_prompt_caching": true,
-        "supports_response_schema": true,
-        "deprecation_date": "2025-10-01",
-        "supports_tool_choice": true
-    },
     "anthropic/claude-3-7-sonnet-20250219": {
         "max_tokens": 8192,
         "max_input_tokens": 200000,
@@ -243,43 +150,6 @@
         "deprecation_date": "2025-10-01",
         "supports_tool_choice": true
     },
-    "openrouter/anthropic/claude-3.7-sonnet": {
-        "max_tokens": 8192,
-        "max_input_tokens": 200000,
-        "max_output_tokens": 8192,
-        "input_cost_per_token": 0.000003,
-        "output_cost_per_token": 0.000015,
-        "cache_creation_input_token_cost": 0.00000375,
-        "cache_read_input_token_cost": 0.0000003,
-        "litellm_provider": "openrouter",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_vision": true,
-        "tool_use_system_prompt_tokens": 159,
-        "supports_assistant_prefill": true,
-        "supports_pdf_input": true,
-        "supports_prompt_caching": true,
-        "supports_response_schema": true,
-        "deprecation_date": "2025-10-01",
-        "supports_tool_choice": true
-    },
-    "gpt-4.5-preview": {
-        "max_tokens": 16384,
-        "max_input_tokens": 128000,
-        "max_output_tokens": 16384,
-        "input_cost_per_token": 0.000075,
-        "output_cost_per_token": 0.00015,
-        "cache_read_input_token_cost": 0.0000375,
-        "litellm_provider": "openai",
-        "mode": "chat",
-        "supports_function_calling": true,
-        "supports_parallel_function_calling": true,
-        "supports_response_schema": true,
-        "supports_vision": true,
-        "supports_prompt_caching": true,
-        "supports_system_messages": true,
-        "supports_tool_choice": true
-    },
     "openai/gpt-4.5-preview": {
         "max_tokens": 16384,
         "max_input_tokens": 128000,
@@ -334,42 +204,6 @@
         "supports_tool_choice": true,
         "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
     },
-    "gemini/gemini-2.5-pro-preview-03-25": {
-        "max_tokens": 8192,
-        "max_input_tokens": 1048576,
-        "max_output_tokens": 64000,
-        "max_images_per_prompt": 3000,
-        "max_videos_per_prompt": 10,
-        "max_video_length": 1,
-        "max_audio_length_hours": 8.4,
-        "max_audio_per_prompt": 1,
-        "max_pdf_size_mb": 30,
-        "input_cost_per_image": 0,
-        "input_cost_per_video_per_second": 0,
-        "input_cost_per_audio_per_second": 0,
-        "input_cost_per_token": 0.00000125,
-        "input_cost_per_character": 0,
-        "input_cost_per_token_above_128k_tokens": 0,
-        "input_cost_per_character_above_128k_tokens": 0,
-        "input_cost_per_image_above_128k_tokens": 0,
-        "input_cost_per_video_per_second_above_128k_tokens": 0,
-        "input_cost_per_audio_per_second_above_128k_tokens": 0,
-        "output_cost_per_token": 0.000010,
-        "output_cost_per_character": 0,
-        "output_cost_per_token_above_128k_tokens": 0,
-        "output_cost_per_character_above_128k_tokens": 0,
-        "litellm_provider": "gemini",
-        "mode": "chat",
-        "supports_system_messages": true,
-        "supports_function_calling": true,
-        "supports_vision": true,
-        "supports_audio_input": true,
-        "supports_video_input": true,
-        "supports_pdf_input": true,
-        "supports_response_schema": true,
-        "supports_tool_choice": true,
-        "source": "https://cloud.google.com/vertex-ai/generative-ai/pricing"
-    },
     "vertex_ai/gemini-2.5-pro-exp-03-25": {
         "max_tokens": 8192,
         "max_input_tokens": 1048576,
@@ -523,15 +357,6 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
-    "xai/grok-3-beta": {
-        "max_tokens": 131072,
-        "max_input_tokens": 131072,
-        "max_output_tokens": 131072,
-        "input_cost_per_token": 0.000003,
-        "output_cost_per_token": 0.000015,
-        "litellm_provider": "xai",
-        "mode": "chat"
-    },
     "openrouter/x-ai/grok-3-mini-beta": {
         "max_tokens": 131072,
         "max_input_tokens": 131072,
@@ -541,15 +366,6 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
-    "xai/grok-3-mini-beta": {
-        "max_tokens": 131072,
-        "max_input_tokens": 131072,
-        "max_output_tokens": 131072,
-        "input_cost_per_token": 0.0000003,
-        "output_cost_per_token": 0.0000005,
-        "litellm_provider": "xai",
-        "mode": "chat"
-    },
     "openrouter/x-ai/grok-3-fast-beta": {
         "max_tokens": 131072,
         "max_input_tokens": 131072,
@@ -559,15 +375,6 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
-    "xai/grok-3-fast-beta": {
-        "max_tokens": 131072,
-        "max_input_tokens": 131072,
-        "max_output_tokens": 131072,
-        "input_cost_per_token": 0.000005,
-        "output_cost_per_token": 0.000025,
-        "litellm_provider": "xai",
-        "mode": "chat"
-    },
     "openrouter/x-ai/grok-3-mini-fast-beta": {
         "max_tokens": 131072,
         "max_input_tokens": 131072,
@@ -577,15 +384,6 @@
         "litellm_provider": "openrouter",
         "mode": "chat"
     },
-    "xai/grok-3-mini-fast-beta": {
-        "max_tokens": 131072,
-        "max_input_tokens": 131072,
-        "max_output_tokens": 131072,
-        "input_cost_per_token": 0.0000006,
-        "output_cost_per_token": 0.000004,
-        "litellm_provider": "xai",
-        "mode": "chat"
-    },
     "openrouter/google/gemini-2.0-flash-exp:free": {
         "max_tokens": 8192,
         "max_input_tokens": 1048576,