Use fq model name in --models search

This commit is contained in:
Paul Gauthier 2024-04-22 19:17:27 -07:00
parent 25b8d6fec8
commit 89a7b3470a
3 changed files with 48 additions and 29 deletions

View file

@@ -574,7 +574,11 @@ def main(argv=None, input=None, output=None, force_git_root=None):
if matches: if matches:
io.tool_output(f'Models which match "{args.models}":') io.tool_output(f'Models which match "{args.models}":')
for model in matches: for model in matches:
io.tool_output(f"- {model}") fq, m = model
if fq == m:
io.tool_output(f"- {m}")
else:
io.tool_output(f"- {m} ({fq})")
else: else:
io.tool_output(f'No models match "{args.models}".') io.tool_output(f'No models match "{args.models}".')
return 0 return 0

View file

@@ -346,7 +346,11 @@ def sanity_check_model(io, model):
if possible_matches: if possible_matches:
io.tool_error("Did you mean one of these?") io.tool_error("Did you mean one of these?")
for match in possible_matches: for match in possible_matches:
io.tool_error(f"- {match}") fq, m = match
if fq == m:
io.tool_error(f"- {m}")
else:
io.tool_error(f"- {m} ({fq})")
if show: if show:
io.tool_error("For more info see https://aider.chat/docs/llms.html#model-warnings") io.tool_error("For more info see https://aider.chat/docs/llms.html#model-warnings")
@@ -355,30 +359,41 @@ def sanity_check_model(io, model):
def fuzzy_match_models(name):
    """Return (fully_qualified_name, model_name) pairs matching *name*.

    Scans litellm's chat-mode model registry and tries matches in order
    of strictness:

    1. substring match against the fully-qualified ``provider/model`` name
       (so a bare provider prefix like ``"openai/"`` lists that provider's
       models);
    2. close spelling matches (difflib) against the bare model names;
    3. close spelling matches against the fully-qualified names.

    Each result is a ``(fq, short)`` tuple; callers print the short name
    and show the fully-qualified form alongside it when they differ.
    """
    # Build (fq, short) pairs for every chat-mode model litellm knows about.
    chat_models = []
    for model, attrs in litellm.model_cost.items():
        if attrs.get("mode") != "chat":
            continue
        provider = attrs["litellm_provider"] + "/"
        # Some registry entries already carry the provider prefix; don't double it.
        fq_model = model if model.startswith(provider) else provider + model
        chat_models.append((fq_model, model))

    # Substring match against the fully-qualified name.
    matching_models = [(fq, m) for fq, m in chat_models if name in fq]
    if matching_models:
        return matching_models

    # Fall back to fuzzy spelling matches on the short names...
    short_names = [m for _fq, m in chat_models]
    close = difflib.get_close_matches(name, short_names, n=3, cutoff=0.8)
    if close:
        # Pair each hit with itself: no fq form is recovered here, and the
        # callers' fq == m check then prints just the name.
        return list(zip(close, close))

    # ...and finally on the fully-qualified names.
    fq_names = [fq for fq, _m in chat_models]
    close = difflib.get_close_matches(name, fq_names, n=3, cutoff=0.8)
    return list(zip(close, close))
def main(): def main():

View file

@@ -56,7 +56,7 @@ aider --4-turbo-vision
aider --35-turbo aider --35-turbo
# List models available from OpenAI # List models available from OpenAI
aider --models openai aider --models openai/
``` ```
You can use `aider --model <model-name>` to use any other OpenAI model. You can use `aider --model <model-name>` to use any other OpenAI model.
@@ -84,7 +84,7 @@ aider --opus
aider --sonnet aider --sonnet
# List models available from Anthropic # List models available from Anthropic
aider --models anthropic aider --models anthropic/
``` ```
You can use `aider --model <model-name>` to use any other Anthropic model. You can use `aider --model <model-name>` to use any other Anthropic model.
@@ -106,7 +106,7 @@ export GROQ_API_KEY=<your-key-goes-here>
aider --model groq/llama3-70b-8192 aider --model groq/llama3-70b-8192
# List models available from Groq # List models available from Groq
aider --models groq aider --models groq/
``` ```
@@ -125,7 +125,7 @@ export COHERE_API_KEY=<your-key-goes-here>
aider --model command-r-plus aider --model command-r-plus
# List models available from Cohere # List models available from Cohere
aider --models cohere_chat aider --models cohere_chat/
``` ```
## Azure ## Azure
@@ -140,7 +140,7 @@ export AZURE_API_BASE=https://example-endpoint.openai.azure.com
aider --model azure/<your_deployment_name> aider --model azure/<your_deployment_name>
# List models available from Azure # List models available from Azure
aider --models azure aider --models azure/
``` ```
## OpenRouter ## OpenRouter
@@ -159,7 +159,7 @@ aider --model openrouter/meta-llama/llama-3-70b-instruct
aider --model openrouter/<provider>/<model> aider --model openrouter/<provider>/<model>
# List models available from OpenRouter # List models available from OpenRouter
aider --models openrouter aider --models openrouter/
``` ```
## OpenAI compatible APIs ## OpenAI compatible APIs
@@ -196,11 +196,11 @@ $ aider --models turbo
Aider v0.29.3-dev Aider v0.29.3-dev
Models which match "turbo": Models which match "turbo":
- gpt-4-turbo-preview - gpt-4-turbo-preview (openai/gpt-4-turbo-preview)
- gpt-4-turbo - gpt-4-turbo (openai/gpt-4-turbo)
- gpt-4-turbo-2024-04-09 - gpt-4-turbo-2024-04-09 (openai/gpt-4-turbo-2024-04-09)
- gpt-3.5-turbo - gpt-3.5-turbo (openai/gpt-3.5-turbo)
... - ...
``` ```
See the [list of providers supported by litellm](https://docs.litellm.ai/docs/providers) See the [list of providers supported by litellm](https://docs.litellm.ai/docs/providers)