mirror of
https://github.com/Aider-AI/aider.git
synced 2025-06-02 18:54:59 +00:00
Merge branch 'main' into watch
This commit is contained in:
commit
34f34879c9
3 changed files with 53 additions and 32 deletions
|
@ -1,5 +1,6 @@
|
|||
import hashlib
|
||||
import json
|
||||
import time
|
||||
|
||||
import backoff
|
||||
|
||||
|
@ -18,26 +19,32 @@ RETRY_TIMEOUT = 60
|
|||
|
||||
def retry_exceptions():
    """Return the tuple of exception types that warrant a retry.

    httpx and openai are imported lazily here so the module can be
    loaded without pulling them in until a request is actually made.
    """
    import httpx
    import openai

    httpx_errors = (
        httpx.ConnectError,
        httpx.RemoteProtocolError,
        httpx.ReadTimeout,
    )

    litellm_errors = (
        litellm.exceptions.BadRequestError,
        litellm.exceptions.AuthenticationError,
        litellm.exceptions.PermissionDeniedError,
        litellm.exceptions.NotFoundError,
        litellm.exceptions.UnprocessableEntityError,
        litellm.exceptions.RateLimitError,
        litellm.exceptions.InternalServerError,
        litellm.exceptions.ContextWindowExceededError,
        litellm.exceptions.ContentPolicyViolationError,
        litellm.exceptions.APIConnectionError,
        litellm.exceptions.APIError,
        litellm.exceptions.ServiceUnavailableError,
        litellm.exceptions.Timeout,
    )

    # litellm exceptions inherit from openai exceptions:
    # https://docs.litellm.ai/docs/exception_mapping
    openai_errors = (
        openai.APITimeoutError,
        openai.UnprocessableEntityError,
        openai.RateLimitError,
        openai.APIConnectionError,
        openai.APIError,
        openai.APIStatusError,
        openai.InternalServerError,
    )

    return httpx_errors + litellm_errors + openai_errors
|
||||
|
||||
|
||||
|
@ -64,8 +71,6 @@ def send_completion(
|
|||
temperature=0,
|
||||
extra_params=None,
|
||||
):
|
||||
from aider.llm import litellm
|
||||
|
||||
kwargs = dict(
|
||||
model=model_name,
|
||||
messages=messages,
|
||||
|
@ -98,18 +103,27 @@ def send_completion(
|
|||
return hash_object, res
|
||||
|
||||
|
||||
def simple_send_with_retries(model_name, messages, extra_params=None):
    """Send `messages` to `model_name` and return the reply text.

    Retries transient failures (any exception type from retry_exceptions())
    with exponential backoff, starting at 0.25s and giving up once the next
    delay would exceed RETRY_TIMEOUT. Returns None when retries are
    exhausted or when the response object is malformed (AttributeError).

    :param model_name: model identifier passed through to send_completion
    :param messages: chat messages list passed through to send_completion
    :param extra_params: optional dict of extra completion parameters
    """
    # NOTE: the scraped diff showed both the old decorator-based body and
    # this retry-loop body merged together; the internal loop below
    # supersedes the @lazy_litellm_retry_decorator, so the duplicated
    # pre-merge remnants (second kwargs dict, early BadRequestError
    # handler, duplicate send_completion call) are removed.
    retry_delay = 0.125
    while True:
        try:
            # Rebuilt each attempt so a retry uses exactly the same request.
            kwargs = {
                "model_name": model_name,
                "messages": messages,
                "functions": None,
                "stream": False,
                "extra_params": extra_params,
            }

            _hash, response = send_completion(**kwargs)
            return response.choices[0].message.content
        except retry_exceptions() as err:
            print(str(err))
            retry_delay *= 2
            if retry_delay > RETRY_TIMEOUT:
                break  # give up; caller receives None
            print(f"Retrying in {retry_delay:.1f} seconds...")
            time.sleep(retry_delay)
            continue
        except AttributeError:
            # Response did not have the expected .choices[...] shape.
            return
||||
|
|
|
@ -55,6 +55,7 @@ model_list = "\n".join(f"- {model}" for model in sorted(prefill_models))
|
|||
|
||||
cog.out(model_list)
|
||||
]]]-->
|
||||
- anthropic.claude-3-5-sonnet-20241022-v2:0
|
||||
- anthropic/claude-3-5-sonnet-20241022
|
||||
- claude-3-5-sonnet-20240620
|
||||
- claude-3-5-sonnet-20241022
|
||||
|
@ -65,6 +66,7 @@ cog.out(model_list)
|
|||
- codestral/codestral-latest
|
||||
- deepseek-chat
|
||||
- deepseek-coder
|
||||
- eu.anthropic.claude-3-5-sonnet-20241022-v2:0
|
||||
- mistral/codestral-2405
|
||||
- mistral/codestral-latest
|
||||
- mistral/codestral-mamba-latest
|
||||
|
@ -85,6 +87,7 @@ cog.out(model_list)
|
|||
- mistral/open-mixtral-8x7b
|
||||
- mistral/pixtral-12b-2409
|
||||
- openrouter/anthropic/claude-3.5-sonnet
|
||||
- us.anthropic.claude-3-5-sonnet-20241022-v2:0
|
||||
- vertex_ai/claude-3-5-sonnet-v2@20241022
|
||||
- vertex_ai/claude-3-5-sonnet@20240620
|
||||
- vertex_ai/claude-3-haiku@20240307
|
||||
|
|
|
@ -4,7 +4,7 @@ from unittest.mock import MagicMock, patch
|
|||
import httpx
|
||||
|
||||
from aider.llm import litellm
|
||||
from aider.sendchat import simple_send_with_retries
|
||||
from aider.sendchat import retry_exceptions, simple_send_with_retries
|
||||
|
||||
|
||||
class PrintCalled(Exception):
|
||||
|
@ -12,6 +12,10 @@ class PrintCalled(Exception):
|
|||
|
||||
|
||||
class TestSendChat(unittest.TestCase):
|
||||
def test_retry_exceptions(self):
    """Test that retry_exceptions() can be called without raising errors"""
    # Building the retryable-exception tuple itself must not raise.
    exceptions = retry_exceptions()
|
||||
|
||||
@patch("litellm.completion")
|
||||
@patch("builtins.print")
|
||||
def test_simple_send_with_retries_rate_limit_error(self, mock_print, mock_completion):
|
||||
|
@ -31,7 +35,7 @@ class TestSendChat(unittest.TestCase):
|
|||
|
||||
# Call the simple_send_with_retries method
|
||||
simple_send_with_retries("model", ["message"])
|
||||
mock_print.assert_called_once()
|
||||
assert mock_print.call_count == 2
|
||||
|
||||
@patch("litellm.completion")
|
||||
@patch("builtins.print")
|
||||
|
@ -44,4 +48,4 @@ class TestSendChat(unittest.TestCase):
|
|||
|
||||
# Call the simple_send_with_retries method
|
||||
simple_send_with_retries("model", ["message"])
|
||||
mock_print.assert_called_once()
|
||||
assert mock_print.call_count == 2
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue