mirror of https://github.com/Aider-AI/aider.git
synced 2025-06-12 23:54:59 +00:00

roughed in openai 1.x

This commit is contained in:
parent fd34766aa9
commit 6ebc142377

15 changed files with 136 additions and 110 deletions
@@ -341,12 +341,12 @@ class TestCoder(unittest.TestCase):
         coder = Coder.create(models.GPT4, None, mock_io)

         # Set up the mock to raise InvalidRequestError
-        mock_chat_completion_create.side_effect = openai.error.InvalidRequestError(
+        mock_chat_completion_create.side_effect = openai.BadRequestError(
             "Invalid request", "param"
         )

         # Call the run method and assert that InvalidRequestError is raised
-        with self.assertRaises(openai.error.InvalidRequestError):
+        with self.assertRaises(openai.BadRequestError):
             coder.run(with_message="hi")

     def test_new_file_edit_one_commit(self):
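Note on the new exception: in openai 1.x, openai.error.InvalidRequestError is gone and openai.BadRequestError is the closest replacement, but the 1.x status errors are built from an httpx.Response rather than two positional strings. A minimal sketch of constructing one for a test mock, assuming openai>=1.0.0 and httpx (the helper name and URL are illustrative, not part of this commit):

import httpx
import openai


def make_bad_request_error(message="Invalid request"):
    # openai 1.x status errors wrap an httpx.Response; build a throwaway 400 response.
    request = httpx.Request("POST", "https://api.openai.com/v1/chat/completions")
    response = httpx.Response(400, request=request)
    return openai.BadRequestError(message, response=response, body=None)

With a helper like this, the two-positional-argument call roughed in above could become mock_chat_completion_create.side_effect = make_bad_request_error().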
@@ -24,12 +24,13 @@ class TestModels(unittest.TestCase):
         model = Model.create("gpt-4-32k-2123")
         self.assertEqual(model.max_context_tokens, 32 * 1024)

-    @patch("openai.Model.list")
+    @patch("openai.resources.Models.list")
     def test_openrouter_model_properties(self, mock_model_list):
-        import openai
+        # import openai

-        old_base = openai.api_base
-        openai.api_base = "https://openrouter.ai/api/v1"
+        # old_base = openai.api_base
+        # TODO: fixme
+        # openai.api_base = "https://openrouter.ai/api/v1"
         mock_model_list.return_value = {
             "data": [
                 {
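For context on the new patch target: in openai 1.x, model listing lives on the Models resource class, so openai.resources.Models.list is patched instead of openai.Model.list, and the call goes through a client instance. A short sketch of exercising the patched target, assuming openai>=1.0.0 (the api_key, base_url, and model id are placeholders):

from unittest.mock import patch

import openai

with patch("openai.resources.Models.list") as mock_list:
    mock_list.return_value = {"data": [{"id": "gpt-4"}]}
    client = openai.OpenAI(api_key="fake-key", base_url="https://openrouter.ai/api/v1")
    models = client.models.list()  # resolves to the patched method, no network call
    assert models["data"][0]["id"] == "gpt-4"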
@@ -49,7 +50,8 @@ class TestModels(unittest.TestCase):
         self.assertEqual(model.max_context_tokens, 8192)
         self.assertEqual(model.prompt_price, 0.06)
         self.assertEqual(model.completion_price, 0.12)
-        openai.api_base = old_base
+        # TODO: fixme
+        # openai.api_base = old_base


 if __name__ == "__main__":
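On the TODO above: openai 1.x drops the module-level openai.api_base setting, so the save/restore dance around it no longer applies as written. The usual 1.x replacement is a per-client base_url; a sketch, assuming openai>=1.0.0 (the key value is a placeholder):

import openai

# Per-client configuration replaces the old module-level openai.api_base.
client = openai.OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key="placeholder-key",
)

# If module-level configuration is still wanted, 1.x also exposes openai.base_url,
# which the default module client picks up (assumption based on the 1.x globals).
openai.base_url = "https://openrouter.ai/api/v1"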
@@ -14,7 +14,7 @@ class TestSendChat(unittest.TestCase):
         # Set up the mock to raise RateLimitError on
         # the first call and return None on the second call
         mock_chat_completion_create.side_effect = [
-            openai.error.RateLimitError("Rate limit exceeded"),
+            openai.RateLimitError("Rate limit exceeded"),
             None,
         ]

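Same constructor caveat as with BadRequestError above: openai.RateLimitError in 1.x also expects an httpx.Response and a body, so the single-string call here is only roughed in. A sketch of a 1.x-compatible side_effect list, assuming openai>=1.0.0 and httpx (the mock name is illustrative):

from unittest.mock import MagicMock

import httpx
import openai

# Build a 1.x RateLimitError; status errors now carry an httpx.Response.
request = httpx.Request("POST", "https://api.openai.com/v1/chat/completions")
response = httpx.Response(429, request=request)
rate_limit_error = openai.RateLimitError("Rate limit exceeded", response=response, body=None)

# First call raises, second call returns None, mirroring the retry test above.
mock_create = MagicMock(side_effect=[rate_limit_error, None])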