diff --git a/README.md b/README.md
index 1cdffa615..81c52ac6d 100644
--- a/README.md
+++ b/README.md
@@ -67,7 +67,7 @@ You can find more chat transcripts on the [examples page](https://aider.chat/exa
   * As an environment variable:
     * `export OPENAI_API_KEY=sk-...` on Linux or Mac
     * `setx OPENAI_API_KEY sk-...` in Windows PowerShell
-  * Or include `openai-api-key: sk-...` in an `.aider.config.yml` file in your home directory or at the root of your git repo, alongside the `.git` dir.
+  * Or include `openai-api-key: sk-...` in an `.aider.conf.yml` file in your home directory or at the root of your git repo, alongside the `.git` dir.
 3. Optionally, install [universal ctags](https://github.com/universal-ctags/ctags). This is helpful if you plan to use aider and GPT-4 with repositories that have more than a handful of files. This allows aider to build a [map of your entire git repo](https://aider.chat/docs/ctags.html) and share it with GPT to help it better understand and modify large codebases.
   * The `ctags` command needs to be on your shell path so that it will run by default when aider invokes `ctags ...`.
diff --git a/aider/coders/wholefile_coder.py b/aider/coders/wholefile_coder.py
index 8c8536f04..3439797bb 100644
--- a/aider/coders/wholefile_coder.py
+++ b/aider/coders/wholefile_coder.py
@@ -30,7 +30,10 @@ class WholeFileCoder(Coder):
         return context
 
     def render_incremental_response(self, final):
-        return self.update_files(mode="diff")
+        try:
+            return self.update_files(mode="diff")
+        except ValueError:
+            return self.partial_response_content
 
     def update_files(self, mode="update"):
         content = self.partial_response_content
diff --git a/aider/main.py b/aider/main.py
index 9e614ecee..d1ccb232f 100644
--- a/aider/main.py
+++ b/aider/main.py
@@ -270,7 +270,9 @@ def main(args=None, input=None, output=None):
                 "No OpenAI API key provided. Use --openai-api-key or setx OPENAI_API_KEY."
             )
         else:
-            io.tool_error("No OpenAI API key provided. Use --openai-api-key or env OPENAI_API_KEY.")
+            io.tool_error(
+                "No OpenAI API key provided. Use --openai-api-key or export OPENAI_API_KEY."
+            )
         return 1
 
     main_model = models.Model(args.model)
diff --git a/tests/test_main.py b/tests/test_main.py
index 5fd752ca7..126d6827f 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -9,6 +9,7 @@ from unittest.mock import patch
 from prompt_toolkit.input import DummyInput
 from prompt_toolkit.output import DummyOutput
 
+from aider.dump import dump  # noqa: F401
 from aider.main import main
 
 
@@ -41,6 +42,23 @@ class TestMain(TestCase):
             main(["--yes", "foo.txt"], input=DummyInput(), output=DummyOutput())
             self.assertTrue(os.path.exists("foo.txt"))
 
+    def test_main_with_git_config_yml(self):
+        subprocess.run(["git", "init"])
+        subprocess.run(["git", "config", "user.email", "dummy@example.com"])
+        subprocess.run(["git", "config", "user.name", "Dummy User"])
+
+        Path(".aider.conf.yml").write_text("no-auto-commits: true\n")
+        with patch("aider.main.Coder.create") as MockCoder:
+            main([], input=DummyInput(), output=DummyOutput())
+            _, kwargs = MockCoder.call_args
+            assert kwargs["auto_commits"] is False
+
+        Path(".aider.conf.yml").write_text("auto-commits: true\n")
+        with patch("aider.main.Coder.create") as MockCoder:
+            main([], input=DummyInput(), output=DummyOutput())
+            _, kwargs = MockCoder.call_args
+            assert kwargs["auto_commits"] is True
+
     def test_main_with_empty_git_dir_new_subdir_file(self):
         subprocess.run(["git", "init"])
         subprocess.run(["git", "config", "user.email", "dummy@example.com"])
diff --git a/tests/test_wholefile.py b/tests/test_wholefile.py
index de85e04c8..bd5a53139 100644
--- a/tests/test_wholefile.py
+++ b/tests/test_wholefile.py
@@ -27,6 +27,31 @@ class TestWholeFileCoder(unittest.TestCase):
 
         self.patcher.stop()
 
+    def test_no_files(self):
+        # Initialize WholeFileCoder with the temporary directory
+        io = InputOutput(yes=True)
+
+        coder = WholeFileCoder(main_model=models.GPT35, io=io, fnames=[])
+        coder.partial_response_content = (
+            'To print "Hello, World!" in most programming languages, you can use the following'
+            ' code:\n\n```python\nprint("Hello, World!")\n```\n\nThis code will output "Hello,'
+            ' World!" to the console.'
+        )
+
+        # This is throwing ValueError!
+        coder.render_incremental_response(True)
+
+    def test_no_files_new_file_should_ask(self):
+        io = InputOutput(yes=False)  # <- yes=FALSE
+        coder = WholeFileCoder(main_model=models.GPT35, io=io, fnames=[])
+        coder.partial_response_content = (
+            'To print "Hello, World!" in most programming languages, you can use the following'
+            ' code:\n\nfoo.js\n```python\nprint("Hello, World!")\n```\n\nThis code will output'
+            ' "Hello, World!" to the console.'
+        )
+        coder.update_files()
+        self.assertFalse(Path("foo.js").exists())
+
     def test_update_files(self):
         # Create a sample file in the temporary directory
         sample_file = "sample.txt"
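
For readers skimming the README change: a minimal sketch of what the renamed `.aider.conf.yml` might contain, combining the `openai-api-key` setting from the README hunk with the `no-auto-commits` flag that the new `test_main_with_git_config_yml` test writes. The `sk-...` value is the usual placeholder, and putting both keys in one file is purely illustrative.

```yaml
# .aider.conf.yml (home directory or the root of the git repo, next to .git)
# Both keys are taken from this diff; combining them here is illustrative.
openai-api-key: sk-...
no-auto-commits: true
```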