Merge branch 'main' into gemini-editblock-and-examples

This commit is contained in:
Paul Gauthier 2024-05-03 13:17:23 -07:00
commit 921c7ceb80
21 changed files with 323 additions and 91 deletions

View file

@ -1 +1 @@
__version__ = "0.30.2-dev"
__version__ = "0.31.2-dev"

View file

@ -151,6 +151,13 @@ def get_parser(default_config_files, git_root):
default=1024,
help="Max number of tokens to use for repo map, use 0 to disable (default: 1024)",
)
default_env_file = os.path.join(git_root, ".env") if git_root else ".env"
group.add_argument(
"--env-file",
metavar="ENV_FILE",
default=default_env_file,
help="Specify the .env file to load (default: .env in git root)",
)
##########
group = parser.add_argument_group("History Files")

View file

@ -66,6 +66,7 @@ class Coder:
main_model=None,
edit_format=None,
io=None,
from_coder=None,
**kwargs,
):
from . import EditBlockCoder, UnifiedDiffCoder, WholeFileCoder
@ -76,15 +77,42 @@ class Coder:
if edit_format is None:
edit_format = main_model.edit_format
if from_coder:
use_kwargs = dict(from_coder.original_kwargs) # copy orig kwargs
# If the edit format changes, we can't leave old ASSISTANT
# messages in the chat history. The old edit format will
# confuse the new LLM. It may try to imitate it, disobeying
# the system prompt.
done_messages = from_coder.done_messages
if edit_format != from_coder.edit_format and done_messages:
done_messages = from_coder.summarizer.summarize_all(done_messages)
# Bring along context from the old Coder
update = dict(
fnames=from_coder.get_inchat_relative_files(),
done_messages=done_messages,
cur_messages=from_coder.cur_messages,
)
use_kwargs.update(update) # override to complete the switch
use_kwargs.update(kwargs) # override passed kwargs
kwargs = use_kwargs
if edit_format == "diff":
return EditBlockCoder(main_model, io, **kwargs)
res = EditBlockCoder(main_model, io, **kwargs)
elif edit_format == "whole":
return WholeFileCoder(main_model, io, **kwargs)
res = WholeFileCoder(main_model, io, **kwargs)
elif edit_format == "udiff":
return UnifiedDiffCoder(main_model, io, **kwargs)
res = UnifiedDiffCoder(main_model, io, **kwargs)
else:
raise ValueError(f"Unknown edit format {edit_format}")
res.original_kwargs = dict(kwargs)
return res
def get_announcements(self):
lines = []
lines.append(f"Aider v{__version__}")
@ -153,6 +181,8 @@ class Coder:
use_git=True,
voice_language=None,
aider_ignore_file=None,
cur_messages=None,
done_messages=None,
):
if not fnames:
fnames = []
@ -166,8 +196,16 @@ class Coder:
self.verbose = verbose
self.abs_fnames = set()
self.cur_messages = []
self.done_messages = []
if cur_messages:
self.cur_messages = cur_messages
else:
self.cur_messages = []
if done_messages:
self.done_messages = done_messages
else:
self.done_messages = []
self.io = io
self.stream = stream

View file

@ -5,15 +5,23 @@ import sys
from pathlib import Path
import git
import litellm
import openai
from prompt_toolkit.completion import Completion
from aider import prompts, voice
from aider import models, prompts, voice
from aider.scrape import Scraper
from aider.utils import is_image_file
from .dump import dump # noqa: F401
litellm.suppress_debug_info = True
class SwitchModel(Exception):
    """Control-flow exception requesting a switch to a different LLM.

    Raised by the /model command handler; the top-level loop catches it
    and re-creates the coder with the carried model.
    """

    def __init__(self, model):
        # Forward the model to Exception so str(exc) and exc.args are
        # informative (the original left the base class uninitialized).
        super().__init__(model)
        # The model object the caller should switch to.
        self.model = model
class Commands:
voice = None
@ -28,6 +36,30 @@ class Commands:
self.voice_language = voice_language
def cmd_model(self, args):
    "Switch to a new LLM"
    # Build the model from the user-supplied name, validate it, then
    # signal the main loop to rebuild the coder around it.
    name = args.strip()
    switched = models.Model(name)
    models.sanity_check_models(self.io, switched)
    raise SwitchModel(switched)
def completions_model(self, partial):
    """Yield prompt_toolkit Completions for model names matching *partial*.

    Matching is case-insensitive substring search over litellm's
    known-model list.
    """
    # Use a distinct local name: the original bound `models =
    # litellm.model_cost.keys()`, shadowing the `models` module imported
    # at file scope.
    known_names = litellm.model_cost.keys()
    # Hoist the lowercasing of the needle out of the loop.
    needle = partial.lower()
    for name in known_names:
        if needle in name.lower():
            yield Completion(name, start_position=-len(partial))
def cmd_models(self, args):
    "Search the list of available models"
    # Guard clause: without a search term there is nothing to match.
    search = args.strip()
    if not search:
        self.io.tool_output("Please provide a partial model name to search for.")
        return
    models.print_matching_models(self.io, search)
def cmd_web(self, args):
"Use headless selenium to scrape a webpage and add the content to the chat"
url = args.strip()
@ -99,6 +131,8 @@ class Commands:
matching_commands, first_word, rest_inp = res
if len(matching_commands) == 1:
return self.do_run(matching_commands[0][1:], rest_inp)
elif first_word in matching_commands:
return self.do_run(first_word[1:], rest_inp)
elif len(matching_commands) > 1:
self.io.tool_error(f"Ambiguous command: {', '.join(matching_commands)}")
else:

View file

@ -295,7 +295,7 @@ class GUI:
# stuff a bunch of vertical whitespace at the top
# to get all the chat text to the bottom
self.messages.container(height=300, border=False)
# self.messages.container(height=300, border=False)
with self.messages:
for msg in self.state.messages:

View file

@ -5,11 +5,13 @@ from pathlib import Path
import git
import litellm
from dotenv import load_dotenv
from streamlit.web import cli
from aider import __version__, models
from aider.args import get_parser
from aider.coders import Coder
from aider.commands import SwitchModel
from aider.io import InputOutput
from aider.repo import GitRepo
from aider.versioncheck import check_version
@ -217,9 +219,12 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
args.assistant_output_color = "blue"
args.code_theme = "default"
if return_coder and args.yes is None:
args.yes = True
io = InputOutput(
args.pretty,
args.yes or return_coder, # Force --yes if return_coder
args.yes,
args.input_history_file,
args.chat_history_file,
input=input,
@ -270,17 +275,7 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
return 0 if not update_available else 1
if args.models:
matches = models.fuzzy_match_models(args.models)
if matches:
io.tool_output(f'Models which match "{args.models}":')
for model in matches:
fq, m = model
if fq == m:
io.tool_output(f"- {m}")
else:
io.tool_output(f"- {m} ({fq})")
else:
io.tool_output(f'No models match "{args.models}".')
models.print_matching_models(io, args.models)
return 0
if args.git:
@ -296,6 +291,9 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
cmd_line = scrub_sensitive_info(args, cmd_line)
io.tool_output(cmd_line, log_only=True)
if args.env_file:
load_dotenv(args.env_file)
if args.anthropic_api_key:
os.environ["ANTHROPIC_API_KEY"] = args.anthropic_api_key
@ -337,6 +335,7 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
voice_language=args.voice_language,
aider_ignore_file=args.aiderignore,
)
except ValueError as err:
io.tool_error(str(err))
return 1
@ -398,7 +397,13 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
return 1
return
coder.run()
while True:
try:
coder.run()
return
except SwitchModel as switch:
coder = Coder.create(main_model=switch.model, io=io, from_coder=coder)
coder.show_announcements()
if __name__ == "__main__":

View file

@ -431,6 +431,20 @@ def fuzzy_match_models(name):
return list(zip(matching_models, matching_models))
def print_matching_models(io, search):
    """Print every known model matching *search* via io.tool_output."""
    matches = fuzzy_match_models(search)
    if not matches:
        io.tool_output(f'No models match "{search}".')
        return
    io.tool_output(f'Models which match "{search}":')
    # Each match is a (fully-qualified, short) name pair; show the
    # fully-qualified form only when it differs.
    for fq, m in matches:
        line = f"- {m}" if fq == m else f"- {m} ({fq})"
        io.tool_output(line)
def main():
if len(sys.argv) != 2:
print("Usage: python models.py <model_name>")

View file

@ -12,6 +12,8 @@ from aider import __version__
aider_user_agent = f"Aider/{__version__} +https://aider.chat"
# Playwright is nice because it has a simple way to install dependencies on most
# platforms.
PLAYWRIGHT_INFO = """
For better web scraping, install Playwright chromium with this command in your terminal:
@ -26,12 +28,40 @@ class Scraper:
playwright_available = None
playwright_instructions_shown = False
# Public API...
def __init__(self, print_error=None):
    """
    `print_error` - a function to call to print error/debug info.
    """
    # Fall back to the builtin print when no reporter is supplied.
    self.print_error = print_error if print_error else print
def scrape(self, url):
    """
    Scrape a url and turn it into readable markdown.

    `url` - the URL to scrape.

    Returns the markdown string, or None when no content could be fetched.
    """
    # Prefer Playwright when available; otherwise fall back to httpx.
    self.try_playwright()
    if self.playwright_available:
        content = self.scrape_with_playwright(url)
    else:
        content = self.scrape_with_httpx(url)
    if not content:
        # Nothing fetched — bare return yields None.
        return
    # Ensure pandoc is available, then convert the fetched HTML to markdown.
    self.try_pandoc()
    content = self.html_to_markdown(content)
    # content = html_to_text(content)
    return content
# Internals...
def scrape_with_playwright(self, url):
with sync_playwright() as p:
try:
@ -88,24 +118,6 @@ class Scraper:
self.print_error(f"An error occurred: {err}")
return None
def scrape(self, url):
    """
    Scrape `url` and turn it into readable markdown.

    Returns the markdown string, or None when no content could be fetched.
    """
    # Prefer Playwright when available; otherwise fall back to httpx.
    self.try_playwright()
    if self.playwright_available:
        content = self.scrape_with_playwright(url)
    else:
        content = self.scrape_with_httpx(url)
    if not content:
        # Nothing fetched — bare return yields None.
        return
    # Ensure pandoc is available, then convert the fetched HTML to markdown.
    self.try_pandoc()
    content = self.html_to_markdown(content)
    # content = html_to_text(content)
    return content
def try_pandoc(self):
if self.pandoc_available:
return

View file

@ -25,6 +25,7 @@ litellm.suppress_debug_info = True
RateLimitError,
APIConnectionError,
httpx.ConnectError,
httpx.RemoteProtocolError,
litellm.exceptions.ServiceUnavailableError,
),
max_tries=10,