mirror of https://github.com/Aider-AI/aider.git
synced 2025-06-01 02:05:00 +00:00

Commit: Added --models

parent 9971547e8f
commit 25b8d6fec8

3 changed files with 58 additions and 15 deletions
aider/main.py

@@ -176,6 +176,11 @@ def main(argv=None, input=None, output=None, force_git_root=None):
         default=default_model,
         help=f"Specify the model to use for the main chat (default: {default_model})",
     )
+    core_group.add_argument(
+        "--models",
+        metavar="MODEL",
+        help="List known models which match the (partial) MODEL name",
+    )
     opus_model = "claude-3-opus-20240229"
     core_group.add_argument(
         "--opus",
@@ -564,6 +569,16 @@ def main(argv=None, input=None, output=None, force_git_root=None):
         args.pretty = False
         io.tool_output("VSCode terminal detected, pretty output has been disabled.")

+    if args.models:
+        matches = models.fuzzy_match_models(args.models)
+        if matches:
+            io.tool_output(f'Models which match "{args.models}":')
+            for model in matches:
+                io.tool_output(f"- {model}")
+        else:
+            io.tool_output(f'No models match "{args.models}".')
+        return 0
+
     if args.git:
         git_root = setup_git(git_root, io)
     if args.gitignore:
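Taken together, the two hunks above add a flag and an early-exit path: `--models` is parsed like any other option, and when it is set, main() prints the matching model names and returns before git and chat setup run. Below is a condensed, self-contained sketch of that flow; the hard-coded model list and the plain `print` calls are illustrative stand-ins for aider's litellm-backed registry and `io.tool_output`.

```python
import argparse


def fuzzy_match_models(name):
    # Stand-in for aider.models.fuzzy_match_models: match against a
    # hard-coded list instead of the litellm model registry.
    known = ["gpt-4-turbo", "gpt-3.5-turbo", "claude-3-opus-20240229"]
    return [model for model in known if name in model]


def main(argv=None):
    parser = argparse.ArgumentParser()
    # Same shape as the new argument: MODEL is a (partial) name to match.
    parser.add_argument(
        "--models",
        metavar="MODEL",
        help="List known models which match the (partial) MODEL name",
    )
    args = parser.parse_args(argv)

    # Mirrors the early-exit branch added to main(): list the matches
    # (if any) and return before any further startup work happens.
    if args.models:
        matches = fuzzy_match_models(args.models)
        if matches:
            print(f'Models which match "{args.models}":')
            for model in matches:
                print(f"- {model}")
        else:
            print(f'No models match "{args.models}".')
        return 0

    print("...normal startup would continue here...")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
```

Running the sketch as `python sketch.py --models turbo` prints the two turbo entries from the stub list and exits; without the flag it falls through to the normal startup path.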
aider/models.py

@@ -355,19 +355,29 @@ def sanity_check_model(io, model):


 def fuzzy_match_models(name):
-    models = litellm.model_cost.keys()
+    chat_models = [
+        model for model, attrs in litellm.model_cost.items() if attrs.get("mode") == "chat"
+    ]

-    # Check for exact match first
-    if name in models:
-        return [name]
+    # exactly matching model
+    matching_models = [model for model in chat_models if name == model]
+    if matching_models:
+        return matching_models

-    # Check for models containing the name
-    matching_models = [model for model in models if name in model]
+    # exactly matching provider
+    matching_models = [
+        model for model in chat_models if litellm.model_cost[model]["litellm_provider"] == name
+    ]
+    if matching_models:
+        return matching_models

-    # If no matches found, check for slight misspellings
-    if not matching_models:
-        matching_models = difflib.get_close_matches(name, models, n=3, cutoff=0.8)
+    # Check for model names containing the name
+    matching_models = [model for model in chat_models if name in model]
+    if matching_models:
+        return matching_models

+    # Check for slight misspellings
+    matching_models = difflib.get_close_matches(name, chat_models, n=3, cutoff=0.8)
     return matching_models
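The rewritten matcher filters to chat-mode models and then tries four strategies in order: exact model name, exact provider name, substring of the model name, and finally close misspellings. Here is a minimal, self-contained sketch of that cascade; the `MODEL_COST` dict and its entries are illustrative stand-ins for `litellm.model_cost`, not real registry data.

```python
import difflib

# Illustrative stand-in for litellm.model_cost: model name -> metadata.
MODEL_COST = {
    "gpt-4-turbo": {"mode": "chat", "litellm_provider": "openai"},
    "gpt-3.5-turbo": {"mode": "chat", "litellm_provider": "openai"},
    "claude-3-opus-20240229": {"mode": "chat", "litellm_provider": "anthropic"},
    "text-embedding-3-small": {"mode": "embedding", "litellm_provider": "openai"},
}


def fuzzy_match_models(name):
    # Only chat-mode models are candidates; embedding models are filtered out.
    chat_models = [m for m, attrs in MODEL_COST.items() if attrs.get("mode") == "chat"]

    # 1. Exact model name.
    matches = [m for m in chat_models if m == name]
    if matches:
        return matches

    # 2. Exact provider name: list every chat model from that provider.
    matches = [m for m in chat_models if MODEL_COST[m]["litellm_provider"] == name]
    if matches:
        return matches

    # 3. Substring match on the model name.
    matches = [m for m in chat_models if name in m]
    if matches:
        return matches

    # 4. Close misspellings, up to three suggestions.
    return difflib.get_close_matches(name, chat_models, n=3, cutoff=0.8)


print(fuzzy_match_models("anthropic"))    # provider match -> ['claude-3-opus-20240229']
print(fuzzy_match_models("turbo"))        # substring -> ['gpt-4-turbo', 'gpt-3.5-turbo']
print(fuzzy_match_models("gpt-4-trubo"))  # misspelling -> ['gpt-4-turbo']
```

The real function behaves the same way, except that its candidates and provider fields come from litellm's model registry rather than a hard-coded dict.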
docs/llms.md (30 changes)
@@ -23,7 +23,7 @@ So you should expect that models which are less capable than GPT-3.5 may struggle

 - [OpenAI](#openai)
 - [Anthropic](#anthropic)
-- [Llama3](#groq)
+- [Groq & Llama3](#groq)
 - [Cohere](#cohere)
 - [Azure](#azure)
 - [OpenRouter](#openrouter)
@@ -54,6 +54,9 @@ aider --4-turbo-vision

 # GPT-3.5 Turbo
 aider --35-turbo
+
+# List models available from OpenAI
+aider --models openai
 ```

 You can use `aider --model <model-name>` to use any other OpenAI model.
@@ -79,6 +82,9 @@ aider --opus

 # Claude 3 Sonnet
 aider --sonnet
+
+# List models available from Anthropic
+aider --models anthropic
 ```

 You can use `aider --model <model-name>` to use any other Anthropic model.
@@ -98,8 +104,12 @@ To use **Llama3 70B**:
 pip install aider-chat
 export GROQ_API_KEY=<your-key-goes-here>
 aider --model groq/llama3-70b-8192
+
+# List models available from Groq
+aider --models groq
 ```


 ## Cohere

 Cohere offers *free* API access to their models.
@@ -113,6 +123,9 @@ To use **Command-R+**:
 pip install aider-chat
 export COHERE_API_KEY=<your-key-goes-here>
 aider --model command-r-plus
+
+# List models available from Cohere
+aider --models cohere_chat
 ```

 ## Azure
@@ -125,6 +138,9 @@ export AZURE_API_KEY=<your-key-goes-here>
 export AZURE_API_VERSION=2023-05-15
 export AZURE_API_BASE=https://example-endpoint.openai.azure.com
 aider --model azure/<your_deployment_name>
+
+# List models available from Azure
+aider --models azure
 ```

 ## OpenRouter
@@ -141,6 +157,9 @@ aider --model openrouter/meta-llama/llama-3-70b-instruct

 # Or any other open router model
 aider --model openrouter/<provider>/<model>
+
+# List models available from OpenRouter
+aider --models openrouter
 ```

 ## OpenAI compatible APIs
@@ -166,22 +185,21 @@ Aider uses the [litellm](https://docs.litellm.ai/docs/providers) package
 to connect to hundreds of other models.
 You can use `aider --model <model-name>` to use any supported model.

-To explore the list of supported models you can run `aider --model <model-name>`
+To explore the list of supported models you can run `aider --models <model-name>`
 with a partial model name.
 If the supplied name is not an exact match for a known model, aider will
 return a list of possible matching models.
 For example:

 ```
-$ aider --model turbo
+$ aider --models turbo

-Model turbo: Unknown model, context window size and token costs unavailable.
-Did you mean one of these?
+Aider v0.29.3-dev
+Models which match "turbo":
 - gpt-4-turbo-preview
 - gpt-4-turbo
 - gpt-4-turbo-2024-04-09
 - gpt-3.5-turbo
-- gpt-3.5-turbo-0301
 ...
 ```
