Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-23 05:45:00 +00:00)

commit 9b26eeb9eb (parent 6863b47e6a)

    Added validate_environment=False to tests

7 changed files with 19 additions and 17 deletions
@@ -132,7 +132,7 @@ class Model:
     max_chat_history_tokens = 1024
     weak_model = None
 
-    def __init__(self, model, weak_model=None, require_model_info=True):
+    def __init__(self, model, weak_model=None, require_model_info=True, validate_environment=True):
         self.name = model
 
         # Are all needed keys/params available?
@@ -141,11 +141,13 @@ class Model:
         keys_in_environment = res.get("keys_in_environment")
 
         if missing_keys:
-            res = f"To use model {model}, please set these environment variables:"
-            for key in missing_keys:
-                res += f"- {key}"
-            raise ModelEnvironmentError(res)
+            if validate_environment:
+                res = f"To use model {model}, please set these environment variables:"
+                for key in missing_keys:
+                    res += f"- {key}"
+                raise ModelEnvironmentError(res)
         elif not keys_in_environment:
+            # https://github.com/BerriAI/litellm/issues/3190
             print(f"Unable to check environment variables for model {model}")
 
         # Do we have the model_info?
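Put together, the hunks above mean the environment check in Model.__init__ only raises when validate_environment is left at its default of True. Below is a minimal, self-contained sketch of that guarded check, for readability only: check_model_environment is a made-up helper, not aider's API, and it assumes litellm.validate_environment(model) returns a dict with "missing_keys" and "keys_in_environment", as the surrounding lines suggest.

# Illustrative sketch only -- mirrors the guarded check added in the hunk above.
import litellm


class ModelEnvironmentError(Exception):
    pass


def check_model_environment(model, validate_environment=True):
    # litellm reports which provider API keys are present for this model.
    res = litellm.validate_environment(model)
    missing_keys = res.get("missing_keys")
    keys_in_environment = res.get("keys_in_environment")

    if missing_keys:
        if validate_environment:
            msg = f"To use model {model}, please set these environment variables:"
            for key in missing_keys:
                msg += f"- {key}"
            raise ModelEnvironmentError(msg)
    elif not keys_in_environment:
        # Some providers cannot be checked: https://github.com/BerriAI/litellm/issues/3190
        print(f"Unable to check environment variables for model {model}")

Callers that only need model metadata, such as the test suite below, can pass validate_environment=False and skip the key check entirely.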
@@ -15,7 +15,7 @@ from aider.utils import ChdirTemporaryDirectory, GitTemporaryDirectory
 
 class TestCoder(unittest.TestCase):
     def setUp(self):
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     def test_allowed_to_edit(self):
         with GitTemporaryDirectory():
@@ -23,7 +23,7 @@ class TestCommands(TestCase):
         self.tempdir = tempfile.mkdtemp()
         os.chdir(self.tempdir)
 
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     def tearDown(self):
         os.chdir(self.original_cwd)
@@ -14,7 +14,7 @@ from aider.models import Model
 
 class TestUtils(unittest.TestCase):
     def setUp(self):
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     # fuzzy logic disabled v0.11.2-dev
     def __test_replace_most_similar_chunk(self):
@@ -5,22 +5,22 @@ from aider.models import Model
 
 class TestModels(unittest.TestCase):
     def test_max_context_tokens(self):
-        model = Model("gpt-3.5-turbo")
+        model = Model("gpt-3.5-turbo", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 16385)
 
-        model = Model("gpt-3.5-turbo-16k")
+        model = Model("gpt-3.5-turbo-16k", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 16385)
 
-        model = Model("gpt-3.5-turbo-1106")
+        model = Model("gpt-3.5-turbo-1106", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 16385)
 
-        model = Model("gpt-4")
+        model = Model("gpt-4", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 8 * 1024)
 
-        model = Model("gpt-4-32k")
+        model = Model("gpt-4-32k", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 32 * 1024)
 
-        model = Model("gpt-4-0613")
+        model = Model("gpt-4-0613", validate_environment=False)
         self.assertEqual(model.info["max_input_tokens"], 8 * 1024)
 
 
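These assertions only exercise model metadata (model.info["max_input_tokens"]), so no provider API keys are needed, which is what makes validate_environment=False safe here. For reference, the same limits can be read directly from litellm; this is a standalone check, not part of the commit, and does not assume how aider populates model.info:

# Standalone check of a token limit asserted above, using litellm directly.
import litellm

info = litellm.get_model_info("gpt-3.5-turbo")
print(info["max_input_tokens"])  # 16385 at the time of this commit, per the test above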
@@ -10,7 +10,7 @@ from aider.utils import IgnorantTemporaryDirectory
 
 class TestRepoMap(unittest.TestCase):
     def setUp(self):
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     def test_get_repo_map(self):
        # Create a temporary directory with sample files for testing
@@ -156,7 +156,7 @@ print(my_function(3, 4))
 
 class TestRepoMapTypescript(unittest.TestCase):
     def setUp(self):
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     def test_get_repo_map_typescript(self):
         # Create a temporary directory with a sample TypeScript file
@@ -18,7 +18,7 @@ class TestWholeFileCoder(unittest.TestCase):
         self.tempdir = tempfile.mkdtemp()
         os.chdir(self.tempdir)
 
-        self.GPT35 = Model("gpt-3.5-turbo")
+        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)
 
     def tearDown(self):
         os.chdir(self.original_cwd)
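All of the test changes follow the same pattern: construct the Model with validate_environment=False in setUp so the suite runs on machines and CI runners that have no provider API keys exported. A minimal illustration of that intent (a hypothetical test, not one from this commit):

# Hypothetical example of the pattern used throughout this commit's test changes:
# skip the environment-variable check so no OPENAI_API_KEY is required to run tests.
import unittest

from aider.models import Model


class TestWithoutApiKeys(unittest.TestCase):
    def setUp(self):
        # validate_environment=False: do not raise if provider keys are missing.
        self.GPT35 = Model("gpt-3.5-turbo", validate_environment=False)

    def test_model_name(self):
        # Model.__init__ stores the model string on self.name (see the first hunk above).
        self.assertEqual(self.GPT35.name, "gpt-3.5-turbo")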