From a6a53d8861d9138386e28b1aaff84cb8b77db3e7 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Sun, 21 Apr 2024 13:03:00 -0700
Subject: [PATCH] copy

---
 HISTORY.md   | 10 +++++++++-
 docs/faq.md  |  7 +++----
 docs/llms.md | 15 ++++++++-------
 3 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/HISTORY.md b/HISTORY.md
index 81aae77a0..f46eeaff5 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,8 +1,16 @@
 # Release history
 
 
-### main
+### v0.29.0
 
+- Added support for [directly connecting to Anthropic, Cohere, Gemini and many other LLM providers](https://aider.chat/docs/llms.html).
+- Added `--weak-model <model-name>` which allows you to specify which model to use for commit messages and chat history summarization.
+- New command line switches for working with popular models:
+  - `--4-turbo-vision`
+  - `--opus`
+  - `--sonnet`
+  - `--anthropic-api-key`
+- Improved "whole" and "diff" backends to better support [Cohere's free to use Command-R+ model](https://aider.chat/docs/llms.html#cohere).
 - Allow `/add` of images from anywhere in the filesystem.
 - Fixed crash when operating in a repo in a detached HEAD state.
 - Fix: Use the same default model in CLI and python scripting.
diff --git a/docs/faq.md b/docs/faq.md
index 5126281f0..5de6e8e2e 100644
--- a/docs/faq.md
+++ b/docs/faq.md
@@ -121,14 +121,13 @@ import os
 import openai
 from aider.coders import Coder
 
-# Make an openai client
-client = openai.OpenAI(api_key=os.environ["OPENAI_API_KEY"])
-
 # This is a list of files to add to the chat
 fnames = ["foo.py"]
 
+model = models.Model("gpt-4-turbo", weak_model="gpt-3.5-turbo")
+
 # Create a coder object
-coder = Coder.create(client=client, fnames=fnames)
+coder = Coder.create(main_model=model, fnames=fnames)
 
 # This will execute one instruction on those files and then return
 coder.run("make a script that prints hello world")
diff --git a/docs/llms.md b/docs/llms.md
index 11b1a3fec..204f74459 100644
--- a/docs/llms.md
+++ b/docs/llms.md
@@ -3,11 +3,10 @@
 
 [![connecting to many LLMs](/assets/llms.jpg)](https://aider.chat/assets/llms.jpg)
 
-Aider works well with OpenAI's GPT 3.5, GPT-4, GPT-4 Turbo with Vision and
-Anthropic's Claude 3 Opus and Sonnet.
-
-GPT-4 Turbo and Claude 3 Opus are recommended, as they are the very best coding assistants.
-Cohere offers *free* API access to their Command-R+ model, which works well with aider
+Aider works best with GPT-4 Turbo and Claude 3 Opus,
+as they are the very best models for editing code.
+Aider also works quite well with GPT-3.5.
+Cohere offers *free* API access to their Command-R+ model, which works with aider
 as a *very basic* coding assistant.
 
 Aider supports connecting to almost any LLM,
@@ -61,7 +60,8 @@ you could do `aider --model claude-3-opus-20240229`.
 
 ## Cohere
 
-Cohere offers *free* API access to their Command-R+ model, which works well with aider
+Cohere offers *free* API access to their Command-R+ model with reasonably
+low rate limits. Command-R+ works well with aider
 as a *very basic* coding assistant.
 
 To work with Cohere's models, you need to provide your
@@ -112,7 +112,8 @@ Aider uses the
 [litellm](https://docs.litellm.ai/docs/providers) package
 to connect to hundreds of other models.
 You can use `aider --model <model-name>` to use any supported model.
-To explore the list of supported models you can run `aider --model <name>`.
+To explore the list of supported models you can run `aider --model <name>`
+with a partial model name.
 If the supplied name is not an exact match for a known model,
 aider will return a list of possible matching models.
 For example: