refactor: update retry exceptions to use openai instead of litellm

Authored by Paul Gauthier on 2024-10-28 14:40:42 -07:00; committed by Paul Gauthier (aider)
parent 8e2a4b47d6
commit 54d55c857b


@@ -18,25 +18,32 @@ RETRY_TIMEOUT = 60
 def retry_exceptions():
     import httpx
+    import openai
 
     return (
         # httpx
         httpx.ConnectError,
         httpx.RemoteProtocolError,
         httpx.ReadTimeout,
-        # litellm
-        litellm.exceptions.AuthenticationError,
-        litellm.exceptions.PermissionDeniedError,
-        litellm.exceptions.NotFoundError,
-        litellm.exceptions.UnprocessableEntityError,
-        litellm.exceptions.RateLimitError,
-        litellm.exceptions.InternalServerError,
-        litellm.exceptions.ContextWindowExceededError,
-        litellm.exceptions.ContentPolicyViolationError,
-        litellm.exceptions.APIConnectionError,
-        litellm.exceptions.APIError,
-        litellm.exceptions.ServiceUnavailableError,
-        litellm.exceptions.Timeout,
+        #
+        # litellm exceptions inherit from openai exceptions
+        # https://docs.litellm.ai/docs/exception_mapping
+        #
+        # openai.BadRequestError,
+        # litellm.ContextWindowExceededError,
+        # litellm.ContentPolicyViolationError,
+        #
+        # openai.AuthenticationError,
+        # openai.PermissionDeniedError,
+        # openai.NotFoundError,
+        #
+        openai.APITimeoutError,
+        openai.UnprocessableEntityError,
+        openai.RateLimitError,
+        openai.APIConnectionError,
+        openai.APIError,
+        openai.APIStatusError,
+        openai.InternalServerError,
     )
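
The assumption behind the swap is spelled out in the new comments: litellm's exceptions inherit from the corresponding openai ones, so a retry tuple built only from openai types still matches errors raised through litellm. A minimal sketch to sanity-check that claim (assumes both packages are installed; not part of this commit):

import litellm
import openai

# Each check should print True if the inheritance holds as the litellm
# exception-mapping docs describe.
print(issubclass(litellm.exceptions.RateLimitError, openai.RateLimitError))
print(issubclass(litellm.exceptions.APIConnectionError, openai.APIConnectionError))
# ContextWindowExceededError is a litellm subclass of openai.BadRequestError,
# which is why it is grouped under the commented-out BadRequestError above.
print(issubclass(litellm.exceptions.ContextWindowExceededError, openai.BadRequestError))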
@@ -63,8 +70,6 @@ def send_completion(
     temperature=0,
     extra_params=None,
 ):
-    from aider.llm import litellm
-
     kwargs = dict(
         model=model_name,
         messages=messages,
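
Since retry_exceptions() returns a plain tuple of exception classes, a caller can pass it directly to an except clause. A rough sketch of the kind of backoff loop that could consume it, capped by RETRY_TIMEOUT (hypothetical helper for illustration; not the code changed in this commit, and aider's real retry logic may differ):

import time

def send_with_retries(send_fn, *args, **kwargs):
    # Retry on the transient httpx/openai errors listed above, doubling the
    # delay each time until it exceeds RETRY_TIMEOUT, then re-raise.
    retry_delay = 0.125
    while True:
        try:
            return send_fn(*args, **kwargs)
        except retry_exceptions() as err:
            retry_delay *= 2
            if retry_delay > RETRY_TIMEOUT:
                raise
            print(f"Retryable error: {err}; retrying in {retry_delay:.2f}s")
            time.sleep(retry_delay)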