From a3a4d87a0ce4aa72c9e1cc33ac9ab353fbc1b83d Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Tue, 30 Apr 2024 15:40:13 -0700
Subject: [PATCH] treat litellm.exceptions.BadRequestError as a 400 error and
 do not retry

---
 aider/coders/base_coder.py | 6 ++++++
 aider/sendchat.py          | 8 --------
 2 files changed, 6 insertions(+), 8 deletions(-)

diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py
index 617ac1642..007f03347 100755
--- a/aider/coders/base_coder.py
+++ b/aider/coders/base_coder.py
@@ -11,6 +11,7 @@ from json.decoder import JSONDecodeError
 from pathlib import Path
 
 import git
+import litellm
 import openai
 from jsonschema import Draft7Validator
 from rich.console import Console, Text
@@ -28,6 +29,8 @@ from aider.utils import is_image_file
 
 from ..dump import dump  # noqa: F401
 
+litellm.suppress_debug_info = True
+
 
 class MissingAPIKeyError(ValueError):
     pass
@@ -572,6 +575,9 @@ class Coder:
             interrupted = True
         except ExhaustedContextWindow:
             exhausted = True
+        except litellm.exceptions.BadRequestError as err:
+            self.io.tool_error(f"BadRequestError: {err}")
+            return
         except openai.BadRequestError as err:
             if "maximum context length" in str(err):
                 exhausted = True
diff --git a/aider/sendchat.py b/aider/sendchat.py
index fd6ade8a4..6c613d072 100644
--- a/aider/sendchat.py
+++ b/aider/sendchat.py
@@ -18,12 +18,6 @@ CACHE = None
 litellm.suppress_debug_info = True
 
 
-def giveup_on_recitiation(ex):
-    if not isinstance(ex, litellm.exceptions.BadRequestError):
-        return
-    return "RECITATION" in str(ex)
-
-
 @backoff.on_exception(
     backoff.expo,
     (
@@ -31,10 +25,8 @@ def giveup_on_recitiation(ex):
         RateLimitError,
         APIConnectionError,
         httpx.ConnectError,
-        litellm.exceptions.BadRequestError,
         litellm.exceptions.ServiceUnavailableError,
     ),
-    giveup=giveup_on_recitiation,
     max_tries=10,
     on_backoff=lambda details: print(
         f"{details.get('exception','Exception')}\nRetry in {details['wait']:.1f} seconds."