mirror of
https://github.com/Aider-AI/aider.git
synced 2025-05-28 16:25:00 +00:00
added windows setx commands #584
This commit is contained in:
parent
d4c63ca959
commit
1a8ea2f81f
1 changed files with 56 additions and 22 deletions
78
docs/llms.md
78
docs/llms.md
|
@ -69,8 +69,8 @@ has been tested and benchmarked to work well with them:
|
|||
```
|
||||
pip install aider-chat
|
||||
|
||||
export OPENAI_API_KEY=<key> # Linux/Mac
|
||||
setx OPENAI_API_KEY <key> # Windows
|
||||
export OPENAI_API_KEY=<key> # Mac/Linux
|
||||
setx OPENAI_API_KEY <key> # Windows
|
||||
|
||||
# GPT-4 Turbo is used by default
|
||||
aider
|
||||
|
@ -101,7 +101,9 @@ has been tested and benchmarked to work well with them:
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export ANTHROPIC_API_KEY=<key> # Windows: setx ANTHROPIC_API_KEY <key>
|
||||
|
||||
export ANTHROPIC_API_KEY=<key> # Mac/Linux
|
||||
setx ANTHROPIC_API_KEY <key> # Windows
|
||||
|
||||
# Claude 3 Opus
|
||||
aider --opus
|
||||
|
@ -127,7 +129,10 @@ You'll need a [Gemini API key](https://aistudio.google.com/app/u/2/apikey).
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export GEMINI_API_KEY=<key> # Windows: setx GEMINI_API_KEY <key>
|
||||
|
||||
export GEMINI_API_KEY=<key> # Mac/Linux
|
||||
setx GEMINI_API_KEY <key> # Windows
|
||||
|
||||
aider --model gemini/gemini-1.5-pro-latest
|
||||
|
||||
# List models available from Gemini
|
||||
|
@ -145,7 +150,10 @@ To use **Llama3 70B**:
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export GROQ_API_KEY=<key> # Windows: setx GROQ_API_KEY <key>
|
||||
|
||||
export GROQ_API_KEY=<key> # Mac/Linux
|
||||
setx GROQ_API_KEY <key> # Windows
|
||||
|
||||
aider --model groq/llama3-70b-8192
|
||||
|
||||
# List models available from Groq
|
||||
|
@ -164,7 +172,10 @@ To use **Command-R+**:
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export COHERE_API_KEY=<key> # Windows: setx COHERE_API_KEY <key>
|
||||
|
||||
export COHERE_API_KEY=<key> # Mac/Linux
|
||||
setx COHERE_API_KEY <key> # Windows
|
||||
|
||||
aider --model command-r-plus
|
||||
|
||||
# List models available from Cohere
|
||||
|
@ -178,10 +189,15 @@ Aider can connect to the OpenAI models on Azure.
|
|||
```
|
||||
pip install aider-chat
|
||||
|
||||
# Linux/Mac: # Windows:
|
||||
export AZURE_API_KEY=<key> # setx AZURE_API_KEY <key>
|
||||
export AZURE_API_VERSION=2023-05-15 # setx AZURE_API_VERSION 2023-05-15
|
||||
export AZURE_API_BASE=https://myendpt.openai.azure.com # setx AZURE_API_BASE https://myendpt.openai.azure.com
|
||||
# Mac/Linux:
|
||||
export AZURE_API_KEY=<key>
|
||||
export AZURE_API_VERSION=2023-05-15
|
||||
export AZURE_API_BASE=https://myendpt.openai.azure.com
|
||||
|
||||
# Windows:
|
||||
setx AZURE_API_KEY <key>
|
||||
setx AZURE_API_VERSION 2023-05-15
|
||||
setx AZURE_API_BASE https://myendpt.openai.azure.com
|
||||
|
||||
aider --model azure/<your_deployment_name>
|
||||
|
||||
|
@ -196,7 +212,9 @@ You'll need an [OpenRouter API key](https://openrouter.ai/keys).
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export OPENROUTER_API_KEY=<key> # Windows: setx OPENROUTER_API_KEY <key>
|
||||
|
||||
export OPENROUTER_API_KEY=<key> # Mac/Linux
|
||||
setx OPENROUTER_API_KEY <key> # Windows
|
||||
|
||||
# Or any other OpenRouter model
|
||||
aider --model openrouter/<provider>/<model>
|
||||
|
@ -209,7 +227,10 @@ In particular, Llama3 70B works well with aider, at low cost:
|
|||
|
||||
```
|
||||
pip install aider-chat
|
||||
export OPENROUTER_API_KEY=<key>
|
||||
|
||||
export OPENROUTER_API_KEY=<key> # Mac/Linux
|
||||
setx OPENROUTER_API_KEY <key> # Windows
|
||||
|
||||
aider --model openrouter/meta-llama/llama-3-70b-instruct
|
||||
```
|
||||
|
||||
|
@ -225,9 +246,12 @@ ollama pull <model>
|
|||
# Start your ollama server
|
||||
ollama serve
|
||||
|
||||
# In another terminal window
|
||||
# In another terminal window...
|
||||
pip install aider-chat
|
||||
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Windows: setx OLLAMA_API_BASE http://127.0.0.1:11434
|
||||
|
||||
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
|
||||
setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows
|
||||
|
||||
aider --model ollama/<model>
|
||||
```
|
||||
|
||||
|
@ -238,8 +262,10 @@ In particular, `llama3:70b` works very well with aider:
|
|||
ollama pull llama3:70b
|
||||
ollama serve
|
||||
|
||||
# ...in another terminal window...
|
||||
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Windows: setx OLLAMA_API_BASE http://127.0.0.1:11434
|
||||
# In another terminal window...
|
||||
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
|
||||
setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows
|
||||
|
||||
aider --model ollama/llama3:70b
|
||||
```
|
||||
|
||||
|
@ -256,9 +282,13 @@ They appear to grant 5M tokens of free API usage to new accounts.
|
|||
```
|
||||
pip install aider-chat
|
||||
|
||||
# Linux/Mac: # Windows:
|
||||
export OPENAI_API_KEY=<key> # setx OPENAI_API_KEY <key>
|
||||
export OPENAI_API_BASE=https://api.deepseek.com/v1 # setx OPENAI_API_BASE https://api.deepseek.com/v1
|
||||
# Mac/Linux:
|
||||
export OPENAI_API_KEY=<key>
|
||||
export OPENAI_API_BASE=https://api.deepseek.com/v1
|
||||
|
||||
# Windows:
|
||||
setx OPENAI_API_KEY <key>
|
||||
setx OPENAI_API_BASE https://api.deepseek.com/v1
|
||||
|
||||
aider --model openai/deepseek-coder
|
||||
```
|
||||
|
@ -275,9 +305,13 @@ Aider can connect to any LLM which is accessible via an OpenAI compatible API en
|
|||
```
|
||||
pip install aider-chat
|
||||
|
||||
# Linux/Mac: # Windows:
|
||||
export OPENAI_API_BASE=<endpoint> # setx OPENAI_API_BASE <endpoint>
|
||||
export OPENAI_API_KEY=<key> # setx OPENAI_API_KEY <key>
|
||||
# Mac/Linux:
|
||||
export OPENAI_API_BASE=<endpoint>
|
||||
export OPENAI_API_KEY=<key>
|
||||
|
||||
# Windows:
|
||||
setx OPENAI_API_BASE <endpoint>
|
||||
setx OPENAI_API_KEY <key>
|
||||
|
||||
# Prefix the model name with openai/
|
||||
aider --model openai/<model-name>
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue