diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py
index 617ac1642..007f03347 100755
--- a/aider/coders/base_coder.py
+++ b/aider/coders/base_coder.py
@@ -11,6 +11,7 @@ from json.decoder import JSONDecodeError
 from pathlib import Path
 
 import git
+import litellm
 import openai
 from jsonschema import Draft7Validator
 from rich.console import Console, Text
@@ -28,6 +29,8 @@ from aider.utils import is_image_file
 
 from ..dump import dump  # noqa: F401
 
+litellm.suppress_debug_info = True
+
 
 class MissingAPIKeyError(ValueError):
     pass
@@ -572,6 +575,9 @@ class Coder:
             interrupted = True
         except ExhaustedContextWindow:
             exhausted = True
+        except litellm.exceptions.BadRequestError as err:
+            self.io.tool_error(f"BadRequestError: {err}")
+            return
         except openai.BadRequestError as err:
             if "maximum context length" in str(err):
                 exhausted = True
diff --git a/aider/sendchat.py b/aider/sendchat.py
index fd6ade8a4..6c613d072 100644
--- a/aider/sendchat.py
+++ b/aider/sendchat.py
@@ -18,12 +18,6 @@ CACHE = None
 litellm.suppress_debug_info = True
 
 
-def giveup_on_recitiation(ex):
-    if not isinstance(ex, litellm.exceptions.BadRequestError):
-        return
-    return "RECITATION" in str(ex)
-
-
 @backoff.on_exception(
     backoff.expo,
     (
@@ -31,10 +25,8 @@ def giveup_on_recitiation(ex):
         RateLimitError,
         APIConnectionError,
         httpx.ConnectError,
-        litellm.exceptions.BadRequestError,
         litellm.exceptions.ServiceUnavailableError,
     ),
-    giveup=giveup_on_recitiation,
     max_tries=10,
     on_backoff=lambda details: print(
         f"{details.get('exception','Exception')}\nRetry in {details['wait']:.1f} seconds."
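A minimal sketch of the retry behavior after this change (illustrative only; TransientError and FatalError are hypothetical stand-ins, not names from the patch): backoff retries only the exceptions listed in its tuple, so once litellm.exceptions.BadRequestError is dropped from that tuple the giveup= predicate has nothing left to do and the error propagates straight to the caller, which now reports it and returns instead of retrying.

import backoff


class TransientError(Exception):
    """Stands in for the exceptions that remain in the retry tuple."""


class FatalError(Exception):
    """Stands in for litellm.exceptions.BadRequestError (e.g. RECITATION)."""


@backoff.on_exception(backoff.expo, (TransientError,), max_tries=10)
def send():
    # Not in the retried tuple, so backoff re-raises it immediately.
    raise FatalError("RECITATION")


try:
    send()
except FatalError as err:
    # Mirrors the new base_coder handling: report and bail out, no retries.
    print(f"BadRequestError: {err}")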