From 240d1fa7274761079c374ef71c7032c0d2777581 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Tue, 9 May 2023 09:51:30 -0700
Subject: [PATCH] Remove custom style from prompt function call in coder.py to
 fix prompt_toolkit rendering issue.

# Context:
USER: Traceback (most recent call last):
  File "/Users/gauthier/Projects/chatgpt/./coder.py", line 496, in <module>
    status = main()
  File "/Users/gauthier/Projects/chatgpt/main.py", line 76, in main
    coder.run()
  File "/Users/gauthier/Projects/chatgpt/coder.py", line 206, in run
    self.run_loop()
  File "/Users/gauthier/Projects/chatgpt/coder.py", line 217, in run_loop
    inp = self.get_input()
  File "/Users/gauthier/Projects/chatgpt/coder.py", line 158, in get_input
    line = prompt(
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/shortcuts/prompt.py", line 1420, in prompt
    return session.prompt(
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/shortcuts/prompt.py", line 1035, in prompt
    return self.app.run(
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/application/application.py", line 961, in run
    return loop.run_until_complete(coro)
  File "/usr/local/Cellar/python@3.9/3.9.16/Frameworks/Python.framework/Versions/3.9/lib/python3.9/asyncio/base_events.py", line 647, in run_until_complete
    return future.result()
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/application/application.py", line 875, in run_async
    return await _run_async(f)
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/application/application.py", line 733, in _run_async
    self._redraw()
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/application/application.py", line 548, in _redraw
    self.context.copy().run(run_in_context)
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/application/application.py", line 531, in run_in_context
    self.renderer.render(self, self.layout)
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/renderer.py", line 652, in render
    self.style.invalidation_hash() != self._last_style_hash
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/styles/style.py", line 400, in invalidation_hash
    return tuple(s.invalidation_hash() for s in self.styles)
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/styles/style.py", line 400, in <genexpr>
    return tuple(s.invalidation_hash() for s in self.styles)
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/styles/base.py", line 179, in invalidation_hash
    return (self.get_style() or self._dummy).invalidation_hash()
  File "/Users/gauthier/Projects/chatgpt/.venv/lib/python3.9/site-packages/prompt_toolkit/styles/base.py", line 179, in invalidation_hash
    return (self.get_style() or self._dummy).invalidation_hash()
AttributeError: 'str' object has no attribute 'invalidation_hash'
ASSISTANT: It seems `prompt_toolkit` is failing while rendering the prompt because the `style` argument passed to `prompt()` is a plain string rather than a style object, so the renderer raises the `AttributeError` inside the `invalidation_hash` method in `style.py`. To fix this, we can update the `get_input` method in `coder.py` to use the default style instead of the custom one.
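If the green prompt color is still wanted, `prompt()` also accepts a real style object (a `BaseStyle` instance) instead of a plain string. A minimal sketch using `Style.from_dict`, with the completer and history arguments omitted:

```python
from prompt_toolkit import prompt
from prompt_toolkit.styles import Style

# Build a proper Style object; the empty-string key styles the default (typed) text.
style = Style.from_dict({"": "fg:green"})

line = prompt("> ", style=style)
```

For now, though, the simplest fix is to drop the custom style entirely.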
Here's the change you need to make in the `coder.py` file:

```python
coder.py
<<<<<<< ORIGINAL
                line = prompt(
                    show,
                    completer=completer_instance,
                    history=FileHistory(self.history_file),
                    style="fg:green",
                )
=======
                line = prompt(
                    show,
                    completer=completer_instance,
                    history=FileHistory(self.history_file),
                )
>>>>>>> UPDATED
```

This change removes the custom style (`style="fg:green"`) from the `prompt` function call, which should resolve the `AttributeError` you encountered.
---
 coder.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/coder.py b/coder.py
index 4c9581ba6..44004cb14 100755
--- a/coder.py
+++ b/coder.py
@@ -155,12 +155,11 @@ class Coder:
                     show = ". "
                 else:
                     show = "> "
-                line = prompt(
-                    show,
-                    completer=completer_instance,
-                    history=FileHistory(self.history_file),
-                    style="fg:green",
-                )
+                line = prompt(
+                    show,
+                    completer=completer_instance,
+                    history=FileHistory(self.history_file),
+                )
             except EOFError:
                 return
             if line.strip() == "{" and not multiline_input:
@@ -494,4 +493,4 @@ if __name__ == "__main__":
     from main import main
 
     status = main()
-    sys.exit(status)
\ No newline at end of file
+    sys.exit(status)