Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-24 14:25:00 +00:00)

Commit 921c7ceb80: Merge branch 'main' into gemini-editblock-and-examples

21 changed files with 323 additions and 91 deletions
@@ -3,8 +3,8 @@

### v0.31.0

- Aider is now also AI pair programming in your browser! Use the `--browser` switch to launch am experimental browser based version of aider.
- Switch models during the chat with `/model <name>` and search the list of available models with `/model <query>`.
- [Aider is now also AI pair programming in your browser!](https://aider.chat/2024/05/02/browser.html) Use the `--browser` switch to launch an experimental browser based version of aider.
- Switch models during the chat with `/model <name>` and search the list of available models with `/models <query>`.

### v0.30.1
@@ -176,6 +176,7 @@ For more information, see the [FAQ](https://aider.chat/docs/faq.html).
* *This project is stellar.* -- [funkytaco](https://github.com/paul-gauthier/aider/issues/112#issuecomment-1637429008)
* *Amazing project, definitely the best AI coding assistant I've used.* -- [joshuavial](https://github.com/paul-gauthier/aider/issues/84)
* *I absolutely love using Aider ... It makes software development feel so much lighter as an experience.* -- [principalideal0](https://discord.com/channels/1131200896827654144/1133421607499595858/1229689636012691468)
* *I have been recovering from multiple shoulder surgeries ... and have used aider extensively. It has allowed me to continue productivity.* -- [codeninja](https://www.reddit.com/r/OpenAI/s/nmNwkHy1zG)
* *I am an aider addict. I'm getting so much more work done, but in less time.* -- [dandandan](https://discord.com/channels/1131200896827654144/1131200896827654149/1135913253483069470)
* *After wasting $100 on tokens trying to find something better, I'm back to Aider. It blows everything else out of the water hands down, there's no competition whatsoever.* -- [SystemSculpt](https://discord.com/channels/1131200896827654144/1131200896827654149/1178736602797846548)
* *Best agent for actual dev work in existing codebases.* -- [Nick Dobos](https://twitter.com/NickADobos/status/1690408967963652097?s=20)
@@ -50,7 +50,6 @@
{% if site.github.is_project_page %}
<span class="site-footer-owner"><a href="{{ site.github.repository_url }}">{{ site.github.repository_name }}</a> is maintained by <a href="{{ site.github.owner_url }}">{{ site.github.owner_name }}</a>.</span>
{% endif %}
<span class="site-footer-credits">This page was generated by <a href="https://pages.github.com">GitHub Pages</a>.</span>
</footer>
</main>
</body>
_posts/2024-05-02-browser.md (new file, 51 lines)
@@ -0,0 +1,51 @@
---
title: Aider in your browser
excerpt: Aider has an experimental browser UI, allowing you to collaborate with LLMs on code in your local git repo.
highlight_image: /assets/browser.jpg
---
# Aider in your browser

<div class="video-container">
<video controls loop poster="/assets/browser.jpg">
<source src="/assets/aider-browser-social.mp4" type="video/mp4">
<a href="/assets/aider-browser-social.mp4">Aider browser UI demo video</a>
</video>
</div>

<style>
.video-container {
position: relative;
padding-bottom: 101.89%; /* 1080 / 1060 = 1.0189 */
height: 0;
overflow: hidden;
}

.video-container video {
position: absolute;
top: 0;
left: 0;
width: 100%;
height: 100%;
}
</style>

Use aider's new experimental browser UI to collaborate with LLMs
to edit code in your local git repo.
Aider will directly edit the code in your local source files,
and [git commit the changes](https://aider.chat/docs/faq.html#how-does-aider-use-git)
with sensible commit messages.
You can start a new project or work with an existing git repo.
Aider works well with GPT 3.5, GPT-4, GPT-4 Turbo with Vision,
and Claude 3 Opus.
It also supports [connecting to almost any LLM](https://aider.chat/docs/llms.html).

Use the `--browser` switch to launch the browser version of aider:

```
pip install aider-chat

export OPENAI_API_KEY=<key> # Mac/Linux
setx OPENAI_API_KEY <key> # Windows

aider --browser
```
@@ -1 +1 @@
__version__ = "0.30.2-dev"
__version__ = "0.31.2-dev"
@@ -151,6 +151,13 @@ def get_parser(default_config_files, git_root):
        default=1024,
        help="Max number of tokens to use for repo map, use 0 to disable (default: 1024)",
    )
    default_env_file = os.path.join(git_root, ".env") if git_root else ".env"
    group.add_argument(
        "--env-file",
        metavar="ENV_FILE",
        default=default_env_file,
        help="Specify the .env file to load (default: .env in git root)",
    )

    ##########
    group = parser.add_argument_group("History Files")
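The new `--env-file` option above pairs with the `load_dotenv()` call added to `aider/main.py` later in this diff. A minimal, self-contained sketch of the same flow (parser setup simplified; the `git_root` stand-in is an assumption, aider detects the real git root):

```
# Sketch of the --env-file flow added in this commit: the option defaults to
# .env in the git root and is handed to python-dotenv.
import argparse
import os

from dotenv import load_dotenv

git_root = os.getcwd()  # stand-in; aider detects the real git root
default_env_file = os.path.join(git_root, ".env") if git_root else ".env"

parser = argparse.ArgumentParser()
parser.add_argument(
    "--env-file",
    metavar="ENV_FILE",
    default=default_env_file,
    help="Specify the .env file to load (default: .env in git root)",
)

args = parser.parse_args([])
if args.env_file:
    load_dotenv(args.env_file)  # silently does nothing if the file is absent
```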
@@ -66,6 +66,7 @@ class Coder:
        main_model=None,
        edit_format=None,
        io=None,
        from_coder=None,
        **kwargs,
    ):
        from . import EditBlockCoder, UnifiedDiffCoder, WholeFileCoder
@@ -76,15 +77,42 @@ class Coder:
        if edit_format is None:
            edit_format = main_model.edit_format

        if from_coder:
            use_kwargs = dict(from_coder.original_kwargs)  # copy orig kwargs

            # If the edit format changes, we can't leave old ASSISTANT
            # messages in the chat history. The old edit format will
            # confused the new LLM. It may try and imitate it, disobeying
            # the system prompt.
            done_messages = from_coder.done_messages
            if edit_format != from_coder.edit_format and done_messages:
                done_messages = from_coder.summarizer.summarize_all(done_messages)

            # Bring along context from the old Coder
            update = dict(
                fnames=from_coder.get_inchat_relative_files(),
                done_messages=done_messages,
                cur_messages=from_coder.cur_messages,
            )

            use_kwargs.update(update)  # override to complete the switch
            use_kwargs.update(kwargs)  # override passed kwargs

            kwargs = use_kwargs

        if edit_format == "diff":
            return EditBlockCoder(main_model, io, **kwargs)
            res = EditBlockCoder(main_model, io, **kwargs)
        elif edit_format == "whole":
            return WholeFileCoder(main_model, io, **kwargs)
            res = WholeFileCoder(main_model, io, **kwargs)
        elif edit_format == "udiff":
            return UnifiedDiffCoder(main_model, io, **kwargs)
            res = UnifiedDiffCoder(main_model, io, **kwargs)
        else:
            raise ValueError(f"Unknown edit format {edit_format}")

        res.original_kwargs = dict(kwargs)

        return res

    def get_announcements(self):
        lines = []
        lines.append(f"Aider v{__version__}")
@@ -153,6 +181,8 @@ class Coder:
        use_git=True,
        voice_language=None,
        aider_ignore_file=None,
        cur_messages=None,
        done_messages=None,
    ):
        if not fnames:
            fnames = []
@@ -166,8 +196,16 @@ class Coder:

        self.verbose = verbose
        self.abs_fnames = set()
        self.cur_messages = []
        self.done_messages = []

        if cur_messages:
            self.cur_messages = cur_messages
        else:
            self.cur_messages = []

        if done_messages:
            self.done_messages = done_messages
        else:
            self.done_messages = []

        self.io = io
        self.stream = stream
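The expanded `Coder.create` above can clone chat state from an existing coder via `from_coder`, summarizing the old history when the edit format changes. A minimal sketch of how a caller might use it (a sketch only; `new_model` and `io` are assumed to be aider's usual model and `InputOutput` objects):

```
# Sketch only: relies on the Coder.create(from_coder=...) behavior shown above.
from aider.coders import Coder

def rebuild_coder_for(new_model, io, old_coder):
    # In-chat files, done_messages and cur_messages are carried over from
    # old_coder; done_messages are summarized first if the edit format changes.
    coder = Coder.create(main_model=new_model, io=io, from_coder=old_coder)
    coder.show_announcements()
    return coder
```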
@@ -5,15 +5,23 @@ import sys
from pathlib import Path

import git
import litellm
import openai
from prompt_toolkit.completion import Completion

from aider import prompts, voice
from aider import models, prompts, voice
from aider.scrape import Scraper
from aider.utils import is_image_file

from .dump import dump  # noqa: F401

litellm.suppress_debug_info = True


class SwitchModel(Exception):
    def __init__(self, model):
        self.model = model


class Commands:
    voice = None
@@ -28,6 +36,30 @@ class Commands:

        self.voice_language = voice_language

    def cmd_model(self, args):
        "Switch to a new LLM"

        model_name = args.strip()
        model = models.Model(model_name)
        models.sanity_check_models(self.io, model)
        raise SwitchModel(model)

    def completions_model(self, partial):
        models = litellm.model_cost.keys()
        for model in models:
            if partial.lower() in model.lower():
                yield Completion(model, start_position=-len(partial))

    def cmd_models(self, args):
        "Search the list of available models"

        args = args.strip()

        if args:
            models.print_matching_models(self.io, args)
        else:
            self.io.tool_output("Please provide a partial model name to search for.")

    def cmd_web(self, args):
        "Use headless selenium to scrape a webpage and add the content to the chat"
        url = args.strip()
@@ -99,6 +131,8 @@ class Commands:
        matching_commands, first_word, rest_inp = res
        if len(matching_commands) == 1:
            return self.do_run(matching_commands[0][1:], rest_inp)
        elif first_word in matching_commands:
            return self.do_run(first_word[1:], rest_inp)
        elif len(matching_commands) > 1:
            self.io.tool_error(f"Ambiguous command: {', '.join(matching_commands)}")
        else:
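The new `/model` command does not swap models in place; it raises `SwitchModel`, which the caller is expected to catch and use to rebuild the coder. A condensed sketch of that control flow, mirroring the `while`/`try` loop added to `aider/main.py` later in this diff:

```
# Sketch of the /model control flow introduced in this commit.
from aider.coders import Coder
from aider.commands import SwitchModel

def chat_loop(coder, io):
    while True:
        try:
            coder.run()  # /model inside the chat raises SwitchModel
            return
        except SwitchModel as switch:
            # Rebuild the coder around the new model, keeping chat context.
            coder = Coder.create(main_model=switch.model, io=io, from_coder=coder)
            coder.show_announcements()
```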
@@ -295,7 +295,7 @@ class GUI:

        # stuff a bunch of vertical whitespace at the top
        # to get all the chat text to the bottom
        self.messages.container(height=300, border=False)
        # self.messages.container(height=300, border=False)

        with self.messages:
            for msg in self.state.messages:
@@ -5,11 +5,13 @@ from pathlib import Path

import git
import litellm
from dotenv import load_dotenv
from streamlit.web import cli

from aider import __version__, models
from aider.args import get_parser
from aider.coders import Coder
from aider.commands import SwitchModel
from aider.io import InputOutput
from aider.repo import GitRepo
from aider.versioncheck import check_version
@@ -217,9 +219,12 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
        args.assistant_output_color = "blue"
        args.code_theme = "default"

    if return_coder and args.yes is None:
        args.yes = True

    io = InputOutput(
        args.pretty,
        args.yes or return_coder,  # Force --yes if return_coder
        args.yes,
        args.input_history_file,
        args.chat_history_file,
        input=input,
@@ -270,17 +275,7 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
        return 0 if not update_available else 1

    if args.models:
        matches = models.fuzzy_match_models(args.models)
        if matches:
            io.tool_output(f'Models which match "{args.models}":')
            for model in matches:
                fq, m = model
                if fq == m:
                    io.tool_output(f"- {m}")
                else:
                    io.tool_output(f"- {m} ({fq})")
        else:
            io.tool_output(f'No models match "{args.models}".')
        models.print_matching_models(io, args.models)
        return 0

    if args.git:
@@ -296,6 +291,9 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
    cmd_line = scrub_sensitive_info(args, cmd_line)
    io.tool_output(cmd_line, log_only=True)

    if args.env_file:
        load_dotenv(args.env_file)

    if args.anthropic_api_key:
        os.environ["ANTHROPIC_API_KEY"] = args.anthropic_api_key
@@ -337,6 +335,7 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
            voice_language=args.voice_language,
            aider_ignore_file=args.aiderignore,
        )

    except ValueError as err:
        io.tool_error(str(err))
        return 1
@@ -398,7 +397,13 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
            return 1
        return

    coder.run()
    while True:
        try:
            coder.run()
            return
        except SwitchModel as switch:
            coder = Coder.create(main_model=switch.model, io=io, from_coder=coder)
            coder.show_announcements()


if __name__ == "__main__":
@@ -431,6 +431,20 @@ def fuzzy_match_models(name):
    return list(zip(matching_models, matching_models))


def print_matching_models(io, search):
    matches = fuzzy_match_models(search)
    if matches:
        io.tool_output(f'Models which match "{search}":')
        for model in matches:
            fq, m = model
            if fq == m:
                io.tool_output(f"- {m}")
            else:
                io.tool_output(f"- {m} ({fq})")
    else:
        io.tool_output(f'No models match "{search}".')


def main():
    if len(sys.argv) != 2:
        print("Usage: python models.py <model_name>")
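The new `print_matching_models` helper only needs an object with a `tool_output` method, which makes it easy to try outside of aider. A small sketch (the `SimpleIO` stand-in is hypothetical; aider passes its normal `InputOutput` object):

```
# Sketch: drive print_matching_models with a minimal io stand-in.
from aider import models

class SimpleIO:
    def tool_output(self, msg=""):
        print(msg)

models.print_matching_models(SimpleIO(), "llama3")
```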
@@ -12,6 +12,8 @@ from aider import __version__

aider_user_agent = f"Aider/{__version__} +https://aider.chat"

# Playwright is nice because it has a simple way to install dependencies on most
# platforms.
PLAYWRIGHT_INFO = """
For better web scraping, install Playwright chromium with this command in your terminal:

@@ -26,12 +28,40 @@ class Scraper:
    playwright_available = None
    playwright_instructions_shown = False

    # Public API...
    def __init__(self, print_error=None):
        """
        `print_error` - a function to call to print error/debug info.
        """
        if print_error:
            self.print_error = print_error
        else:
            self.print_error = print

    def scrape(self, url):
        """
        Scrape a url and turn it into readable markdown.

        `url` - the URLto scrape.
        """
        self.try_playwright()

        if self.playwright_available:
            content = self.scrape_with_playwright(url)
        else:
            content = self.scrape_with_httpx(url)

        if not content:
            return

        self.try_pandoc()

        content = self.html_to_markdown(content)
        # content = html_to_text(content)

        return content

    # Internals...
    def scrape_with_playwright(self, url):
        with sync_playwright() as p:
            try:
@@ -88,24 +118,6 @@ class Scraper:
            self.print_error(f"An error occurred: {err}")
            return None

    def scrape(self, url):
        self.try_playwright()

        if self.playwright_available:
            content = self.scrape_with_playwright(url)
        else:
            content = self.scrape_with_httpx(url)

        if not content:
            return

        self.try_pandoc()

        content = self.html_to_markdown(content)
        # content = html_to_text(content)

        return content

    def try_pandoc(self):
        if self.pandoc_available:
            return
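With `scrape()` documented as part of the public API above, the class can be used on its own. A minimal usage sketch (the URL is only an example; `print_error` falls back to `print`, as in the constructor shown above):

```
# Sketch: fetch a page and convert it to markdown via the Scraper API above.
from aider.scrape import Scraper

scraper = Scraper(print_error=print)
content = scraper.scrape("https://aider.chat")  # markdown, or None if nothing was fetched
if content:
    print(content[:300])
```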
@@ -25,6 +25,7 @@ litellm.suppress_debug_info = True
        RateLimitError,
        APIConnectionError,
        httpx.ConnectError,
        httpx.RemoteProtocolError,
        litellm.exceptions.ServiceUnavailableError,
    ),
    max_tries=10,
assets/aider-browser-social.mp4 (new binary file, not shown)
assets/browser.jpg (new binary file, 397 KiB, not shown)
@@ -14,9 +14,11 @@ layout: default
{{ post.content | strip_html | truncatewords: 100 }}...
{% endif %}
{% if post.highlight_image %}
<div class="post-highlight">
<img src="{{ site.baseurl }}{{ post.highlight_image }}" alt="Highlight Image">
</div>
<a href="{{ site.baseurl }}{{ post.url }}">
<div class="post-highlight">
<img src="{{ site.baseurl }}{{ post.highlight_image }}" alt="Highlight Image">
</div>
</a>
{% endif %}
</div>
</div>
@@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.11
# by the following command:
#
#    pip-compile --output-file=dev-requirements.txt dev-requirements.in
#    pip-compile dev-requirements.in
#
alabaster==0.7.16
    # via sphinx
@@ -34,11 +34,11 @@ docutils==0.20.1
    # via
    #   sphinx
    #   sphinx-rtd-theme
filelock==3.13.4
filelock==3.14.0
    # via virtualenv
fonttools==4.51.0
    # via matplotlib
identify==2.5.35
identify==2.5.36
    # via pre-commit
idna==3.7
    # via requests
@@ -85,9 +85,9 @@ pillow==10.3.0
    # via matplotlib
pip-tools==7.4.1
    # via -r dev-requirements.in
platformdirs==4.2.0
platformdirs==4.2.1
    # via virtualenv
pluggy==1.4.0
pluggy==1.5.0
    # via pytest
pox==0.3.4
    # via pathos
@@ -101,11 +101,11 @@ pygments==2.17.2
    #   sphinx
pyparsing==3.1.2
    # via matplotlib
pyproject-hooks==1.0.0
pyproject-hooks==1.1.0
    # via
    #   build
    #   pip-tools
pytest==8.1.1
pytest==8.2.0
    # via -r dev-requirements.in
python-dateutil==2.9.0.post0
    # via
@@ -125,7 +125,7 @@ six==1.16.0
    # via python-dateutil
snowballstemmer==2.2.0
    # via sphinx
sphinx==7.3.6
sphinx==7.3.7
    # via
    #   sphinx-rtd-theme
    #   sphinxcontrib-jquery
@@ -153,7 +153,7 @@ tzdata==2024.1
    # via pandas
urllib3==2.2.1
    # via requests
virtualenv==20.25.3
virtualenv==20.26.1
    # via pre-commit
wheel==0.43.0
    # via pip-tools
@@ -132,6 +132,12 @@ Installing PortAudio is completely optional, but can usually be accomplished lik

## Add aider to your editor (optional)

Other projects have integrated aider into some IDE/editors.
It's not clear if they are tracking the latest
versions of aider,
so it may be best to just run the latest
aider in a terminal alongside your editor.

### NeoVim

[joshuavial](https://github.com/joshuavial) provided a NeoVim plugin for aider:
docs/llms.md
@@ -68,7 +68,9 @@ has been tested and benchmarked to work well with them:

```
pip install aider-chat
export OPENAI_API_KEY=<your-key-goes-here>

export OPENAI_API_KEY=<key> # Mac/Linux
setx OPENAI_API_KEY <key> # Windows

# GPT-4 Turbo is used by default
aider
@@ -99,7 +101,9 @@ has been tested and benchmarked to work well with them:

```
pip install aider-chat
export ANTHROPIC_API_KEY=<your-key-goes-here>

export ANTHROPIC_API_KEY=<key> # Mac/Linux
setx ANTHROPIC_API_KEY <key> # Windows

# Claude 3 Opus
aider --opus
@@ -125,7 +129,10 @@ You'll need a [Gemini API key](https://aistudio.google.com/app/u/2/apikey).

```
pip install aider-chat
export GEMINI_API_KEY=<your-key-goes-here>

export GEMINI_API_KEY=<key> # Mac/Linux
setx GEMINI_API_KEY <key> # Windows

aider --model gemini/gemini-1.5-pro-latest

# List models available from Gemini
@@ -143,7 +150,10 @@ To use **Llama3 70B**:

```
pip install aider-chat
export GROQ_API_KEY=<your-key-goes-here>

export GROQ_API_KEY=<key> # Mac/Linux
setx GROQ_API_KEY <key> # Windows

aider --model groq/llama3-70b-8192

# List models available from Groq
@@ -162,7 +172,10 @@ To use **Command-R+**:

```
pip install aider-chat
export COHERE_API_KEY=<your-key-goes-here>

export COHERE_API_KEY=<key> # Mac/Linux
setx COHERE_API_KEY <key> # Windows

aider --model command-r-plus

# List models available from Cohere
@@ -175,9 +188,17 @@ Aider can connect to the OpenAI models on Azure.

```
pip install aider-chat
export AZURE_API_KEY=<your-key-goes-here>

# Mac/Linux:
export AZURE_API_KEY=<key>
export AZURE_API_VERSION=2023-05-15
export AZURE_API_BASE=https://example-endpoint.openai.azure.com
export AZURE_API_BASE=https://myendpt.openai.azure.com

# Windows:
setx AZURE_API_KEY <key>
setx AZURE_API_VERSION 2023-05-15
setx AZURE_API_BASE https://myendpt.openai.azure.com

aider --model azure/<your_deployment_name>

# List models available from Azure
@@ -191,7 +212,9 @@ You'll need an [OpenRouter API key](https://openrouter.ai/keys).

```
pip install aider-chat
export OPENROUTER_API_KEY=<your-key-goes-here>

export OPENROUTER_API_KEY=<key> # Mac/Linux
setx OPENROUTER_API_KEY <key> # Windows

# Or any other open router model
aider --model openrouter/<provider>/<model>
@@ -204,7 +227,10 @@ In particular, Llama3 70B works well with aider, at low cost:

```
pip install aider-chat
export OPENROUTER_API_KEY=<your-key-goes-here>

export OPENROUTER_API_KEY=<key> # Mac/Linux
setx OPENROUTER_API_KEY <key> # Windows

aider --model openrouter/meta-llama/llama-3-70b-instruct
```

@@ -215,15 +241,18 @@ Aider can connect to local Ollama models.

```
# Pull the model
ollama pull <MODEL>
ollama pull <model>

# Start your ollama server
ollama serve

# In another terminal window
# In another terminal window...
pip install aider-chat
export OLLAMA_API_BASE=http://127.0.0.1:11434
aider --model ollama/<MODEL>

export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows

aider --model ollama/<model>
```

In particular, `llama3:70b` works very well with aider:
@@ -233,8 +262,10 @@ In particular, `llama3:70b` works very well with aider:
ollama pull llama3:70b
ollama serve

# ...in another terminal window...
export OLLAMA_API_BASE=http://127.0.0.1:11434
# In another terminal window...
export OLLAMA_API_BASE=http://127.0.0.1:11434 # Mac/Linux
setx OLLAMA_API_BASE http://127.0.0.1:11434 # Windows

aider --model ollama/llama3:70b
```

@@ -250,8 +281,15 @@ They appear to grant 5M tokens of free API usage to new accounts.

```
pip install aider-chat
export OPENAI_API_KEY=<your-key-goes-here>

# Mac/Linux:
export OPENAI_API_KEY=<key>
export OPENAI_API_BASE=https://api.deepseek.com/v1

# Windows:
setx OPENAI_API_KEY <key>
setx OPENAI_API_BASE https://api.deepseek.com/v1

aider --model openai/deepseek-coder
```

@@ -266,10 +304,14 @@ Aider can connect to any LLM which is accessible via an OpenAI compatible API en

```
pip install aider-chat
export OPENAI_API_BASE=<your-endpoint-goes-here>

# If your endpoint needs a key
export OPENAI_API_KEY=<your-key-goes-here>
# Mac/Linux:
export OPENAI_API_BASE=<endpoint>
export OPENAI_API_KEY=<key>

# Windows:
setx OPENAI_API_BASE <endpoint>
setx OPENAI_API_KEY <key>

# Prefix the model name with openai/
aider --model openai/<model-name>
@@ -389,19 +431,21 @@ use these switches: `--edit-format diff` or `--edit-format udiff`.
# Using a .env file

Aider will read environment variables from a `.env` file in
the current directory.
You can use it to store various keys and other settings for the
root of your git repo or in current directory.
You can give it an explicit file to load with the `--env-file <filename>` parameter.

You can use a `.env` file to store various keys and other settings for the
models you use with aider.

Here is an example `.env` file:

```
OPENAI_API_KEY=<your-key-goes-here>
ANTHROPIC_API_KEY=<your-key-goes-here>
GROQ_API_KEY=<your-key-goes-here>
OPENROUTER_API_KEY=<your-key-goes-here>
OPENAI_API_KEY=<key>
ANTHROPIC_API_KEY=<key>
GROQ_API_KEY=<key>
OPENROUTER_API_KEY=<key>

AZURE_API_KEY=<your-key-goes-here>
AZURE_API_KEY=<key>
AZURE_API_VERSION=2023-05-15
AZURE_API_BASE=https://example-endpoint.openai.azure.com
@@ -54,7 +54,7 @@ diskcache==5.6.3
    # via -r requirements.in
distro==1.9.0
    # via openai
filelock==3.13.4
filelock==3.14.0
    # via huggingface-hub
frozenlist==1.4.1
    # via
@@ -70,7 +70,7 @@ gitpython==3.1.43
    #   streamlit
google-ai-generativelanguage==0.6.2
    # via google-generativeai
google-api-core[grpc]==2.18.0
google-api-core[grpc]==2.19.0
    # via
    #   google-ai-generativelanguage
    #   google-api-python-client
@@ -96,7 +96,7 @@ greenlet==3.0.3
    # via playwright
grep-ast==0.2.4
    # via -r requirements.in
grpcio==1.62.2
grpcio==1.63.0
    # via
    #   google-api-core
    #   grpcio-status
@@ -112,7 +112,7 @@ httplib2==0.22.0
    #   google-auth-httplib2
httpx==0.27.0
    # via openai
huggingface-hub==0.22.2
huggingface-hub==0.23.0
    # via tokenizers
idna==3.7
    # via
@@ -127,13 +127,13 @@ jinja2==3.1.3
    #   altair
    #   litellm
    #   pydeck
jsonschema==4.21.1
jsonschema==4.22.0
    # via
    #   -r requirements.in
    #   altair
jsonschema-specifications==2023.12.1
    # via jsonschema
litellm==1.35.23
litellm==1.35.35
    # via -r requirements.in
markdown-it-py==3.0.0
    # via rich
@@ -156,7 +156,7 @@ numpy==1.26.4
    #   pydeck
    #   scipy
    #   streamlit
openai==1.23.3
openai==1.25.0
    # via
    #   -r requirements.in
    #   litellm
@@ -211,7 +211,7 @@ pydantic==2.7.1
    #   openai
pydantic-core==2.18.2
    # via pydantic
pydeck==0.9.0b1
pydeck==0.9.0
    # via streamlit
pyee==11.1.0
    # via playwright
@@ -231,11 +231,11 @@ pyyaml==6.0.1
    # via
    #   -r requirements.in
    #   huggingface-hub
referencing==0.35.0
referencing==0.35.1
    # via
    #   jsonschema
    #   jsonschema-specifications
regex==2024.4.16
regex==2024.4.28
    # via tiktoken
requests==2.31.0
    # via
@@ -219,3 +219,21 @@ class TestMain(TestCase):
        main(["--message", test_message], input=DummyInput(), output=DummyOutput())

        mock_io_instance.add_to_input_history.assert_called_once_with(test_message)

    @patch("aider.main.InputOutput")
    @patch("aider.coders.base_coder.Coder.run")
    def test_yes(self, mock_run, MockInputOutput):
        test_message = "test message"

        main(["--yes", "--message", test_message])
        args, kwargs = MockInputOutput.call_args
        self.assertTrue(args[1])

    @patch("aider.main.InputOutput")
    @patch("aider.coders.base_coder.Coder.run")
    def test_default_yes(self, mock_run, MockInputOutput):
        test_message = "test message"

        main(["--message", test_message])
        args, kwargs = MockInputOutput.call_args
        self.assertEqual(args[1], None)