feat: Add warning for empty LLM streaming responses

This commit is contained in:
Paul Gauthier (aider) 2025-03-06 12:52:32 -08:00
parent cf089abb64
commit 012afc0708

View file

@@ -1720,6 +1720,8 @@ class Coder:
                 raise FinishReasonLength()

     def show_send_output_stream(self, completion):
+        received_content = False
         for chunk in completion:
             if len(chunk.choices) == 0:
                 continue
@@ -1738,6 +1740,7 @@ class Coder:
                         self.partial_response_function_call[k] += v
                     else:
                         self.partial_response_function_call[k] = v
+                received_content = True
             except AttributeError:
                 pass
@@ -1745,6 +1748,7 @@ class Coder:
                 text = chunk.choices[0].delta.content
                 if text:
                     self.partial_response_content += text
+                    received_content = True
             except AttributeError:
                 text = None
@@ -1761,6 +1765,9 @@ class Coder:
                 sys.stdout.write(safe_text)
                 sys.stdout.flush()
                 yield text

+        if not received_content:
+            self.io.tool_warning("Empty response received from LLM.")

     def live_incremental_response(self, final):
         show_resp = self.render_incremental_response(final)