treat litellm.exceptions.BadRequestError as a 400 error and do not retry

This commit is contained in:
Paul Gauthier 2024-04-30 15:40:13 -07:00
parent 3469e04eb8
commit a3a4d87a0c
2 changed files with 6 additions and 8 deletions

View file

@ -11,6 +11,7 @@ from json.decoder import JSONDecodeError
from pathlib import Path
import git
import litellm
import openai
from jsonschema import Draft7Validator
from rich.console import Console, Text
@ -28,6 +29,8 @@ from aider.utils import is_image_file
from ..dump import dump # noqa: F401
litellm.suppress_debug_info = True
class MissingAPIKeyError(ValueError):
    """Raised when a required provider API key is not configured."""
@ -572,6 +575,9 @@ class Coder:
interrupted = True
except ExhaustedContextWindow:
exhausted = True
except litellm.exceptions.BadRequestError as err:
self.io.tool_error(f"BadRequestError: {err}")
return
except openai.BadRequestError as err:
if "maximum context length" in str(err):
exhausted = True

View file

@ -18,12 +18,6 @@ CACHE = None
litellm.suppress_debug_info = True
def giveup_on_recitiation(ex):
    """Decide whether backoff should give up retrying for exception *ex*.

    Used as the ``giveup=`` predicate for ``backoff.on_exception``: returning
    a truthy value stops further retries.

    :param ex: the exception instance raised by the wrapped call.
    :return: ``True`` only for a ``litellm.exceptions.BadRequestError`` whose
        message mentions "RECITATION" (a non-retryable content refusal);
        ``False`` otherwise.
    """
    # Only BadRequestError can carry the RECITATION refusal; anything else
    # should keep retrying.  Return an explicit bool (the original bare
    # `return` produced None, an inconsistent return type for a predicate).
    if not isinstance(ex, litellm.exceptions.BadRequestError):
        return False
    return "RECITATION" in str(ex)
@backoff.on_exception(
backoff.expo,
(
@ -31,10 +25,8 @@ def giveup_on_recitiation(ex):
RateLimitError,
APIConnectionError,
httpx.ConnectError,
litellm.exceptions.BadRequestError,
litellm.exceptions.ServiceUnavailableError,
),
giveup=giveup_on_recitiation,
max_tries=10,
on_backoff=lambda details: print(
f"{details.get('exception','Exception')}\nRetry in {details['wait']:.1f} seconds."