Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-24 14:25:00 +00:00)
Commit 23e6c4ee55: fixed test_coder
Parent commit: fb07b784f6
3 changed files with 15 additions and 16 deletions
@@ -40,6 +40,7 @@ def wrap_fence(name):
 
 
 class Coder:
+    client = None
     abs_fnames = None
     repo = None
     last_aider_commit_hash = None

@@ -479,6 +480,7 @@ class Coder:
             except ExhaustedContextWindow:
                 exhausted = True
             except openai.BadRequestError as err:
+                dump(err)
                 if "maximum context length" in str(err):
                     exhausted = True
                 else:

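The two hunks above give Coder a `client` class attribute and dump the caught BadRequestError before inspecting it. As a hedged illustration only, a caller might wire an openai 1.x client into `Coder.create` via the `client` keyword that the updated test below exercises; the `InputOutput` helper and the `openai.OpenAI()` constructor are assumptions here, not part of this diff.

```python
# Illustrative sketch, not code from this commit: pass an openai 1.x client
# into Coder.create via the `client` keyword exercised by the updated test.
import openai

from aider import models
from aider.coders import Coder
from aider.io import InputOutput  # assumed helper, not part of this diff

client = openai.OpenAI()  # reads OPENAI_API_KEY from the environment
io = InputOutput()
coder = Coder.create(models.GPT4, None, io, client=client)
coder.run(with_message="make a simple change")
```
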
@@ -6,13 +6,9 @@ import openai
 import requests
 
 # from diskcache import Cache
-from openai import (
-    APIConnectionError,
-    APIError,
-    InternalServerError,
-    RateLimitError,
-    Timeout,
-)
+from openai import APIConnectionError, InternalServerError, RateLimitError
+
+from aider.dump import dump  # noqa: F401
 
 CACHE_PATH = "~/.aider.send.cache.v1"
 CACHE = None

@@ -22,8 +18,6 @@ CACHE = None
 @backoff.on_exception(
     backoff.expo,
     (
-        Timeout,
-        APIError,
         InternalServerError,
         RateLimitError,
         APIConnectionError,

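For reference, the `@backoff.on_exception` pattern trimmed above retries a call with exponential delay whenever one of the listed exception types is raised. A minimal, self-contained sketch with a stand-in exception (not the openai error classes):

```python
# Minimal sketch of the retry pattern used above, with a stand-in exception
# instead of the openai 1.x error classes.
import backoff


class TransientError(Exception):
    pass


calls = {"n": 0}


@backoff.on_exception(backoff.expo, (TransientError,), max_tries=3)
def flaky():
    # Fails twice, then succeeds, so backoff retries with exponential delay.
    calls["n"] += 1
    if calls["n"] < 3:
        raise TransientError("temporarily unavailable")
    return "ok"


print(flaky())  # retries twice, then prints "ok"
```

With `max_tries=3`, the wrapped function is attempted at most three times before the exception propagates.
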
@@ -331,18 +331,21 @@ class TestCoder(unittest.TestCase):
         # both files should still be here
         self.assertEqual(len(coder.abs_fnames), 2)
 
-    @patch("aider.coders.base_coder.openai.ChatCompletion.create")
-    def test_run_with_invalid_request_error(self, mock_chat_completion_create):
+    def test_run_with_invalid_request_error(self):
         with ChdirTemporaryDirectory():
             # Mock the IO object
             mock_io = MagicMock()
 
-            # Initialize the Coder object with the mocked IO and mocked repo
-            coder = Coder.create(models.GPT4, None, mock_io)
+            mock_client = MagicMock()
 
-            # Set up the mock to raise InvalidRequestError
-            mock_chat_completion_create.side_effect = openai.BadRequestError(
-                "Invalid request", "param"
+            # Initialize the Coder object with the mocked IO and mocked repo
+            coder = Coder.create(models.GPT4, None, mock_io, client=mock_client)
+
+            # Set up the mock to raise
+            mock_client.chat.completions.create.side_effect = openai.BadRequestError(
+                message="Invalid request",
+                response=MagicMock(),
+                body=None,
             )
 
             # Call the run method and assert that InvalidRequestError is raised

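The hunk above ends just before the assertion, which is left as shown. Purely as an illustration of the mocking pattern it introduces (not the commit's remaining test code), a MagicMock client can be made to raise openai 1.x's BadRequestError like so:

```python
# Illustrative sketch (not the commit's code): make a MagicMock client raise
# openai 1.x's BadRequestError, the pattern the updated test relies on.
from unittest.mock import MagicMock

import openai

mock_client = MagicMock()
mock_client.chat.completions.create.side_effect = openai.BadRequestError(
    message="Invalid request",
    response=MagicMock(),
    body=None,
)

try:
    mock_client.chat.completions.create(model="gpt-4", messages=[])
except openai.BadRequestError as err:
    print(f"caught: {err}")
```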