From 36663cf04e0f69d96fe78a10c5a79a7797186863 Mon Sep 17 00:00:00 2001
From: Paul Gauthier
Date: Sat, 17 Aug 2024 08:47:43 -0700
Subject: [PATCH] fix: Update content handling in ChatChunks class

feat: Add date-based caching for prompts
refactor: Simplify current date/time formatting in Coder class
---
 aider/coders/base_coder.py | 15 +++++++++++----
 aider/models.py            |  2 +-
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/aider/coders/base_coder.py b/aider/coders/base_coder.py
index e566a1769..4edea532c 100755
--- a/aider/coders/base_coder.py
+++ b/aider/coders/base_coder.py
@@ -91,7 +91,7 @@ class ChatChunks:
         if not messages:
             return
 
-        content = messages[-1]
+        content = messages[-1]["content"]
         if type(content) is str:
             content = dict(
                 type="text",
@@ -99,6 +99,8 @@ class ChatChunks:
             )
         content["cache_control"] = {"type": "ephemeral"}
 
+        messages[-1]["content"] = [content]
+
 
 class Coder:
     abs_fnames = None
@@ -910,8 +912,12 @@ class Coder:
         if user_lang:
             platform_text += f"- Language: {user_lang}\n"
 
-        dt = datetime.now().astimezone().strftime("%Y-%m-%dT%H:%M:%S%z")
-        platform_text += f"- Current date/time: {dt}"
+        if self.cache_prompts:
+            dt = datetime.now().astimezone().strftime("%Y-%m-%d")
+            platform_text += f"- Current date: {dt}"
+        else:
+            dt = datetime.now().astimezone().strftime("%Y-%m-%dT%H:%M:%S%z")
+            platform_text += f"- Current date/time: {dt}"
 
         prompt = prompt.format(
             fence=self.fence,
@@ -1020,7 +1026,8 @@ class Coder:
         if self.cache_prompts and self.main_model.cache_control:
             chunks.add_cache_control_headers()
 
-        return chunks.all_messages()
+        msgs = chunks.all_messages()
+        return msgs
 
     def send_message(self, inp):
         self.aider_edited_files = None
diff --git a/aider/models.py b/aider/models.py
index 18c6618a0..b355e8276 100644
--- a/aider/models.py
+++ b/aider/models.py
@@ -17,7 +17,7 @@ from aider.dump import dump  # noqa: F401
 from aider.llm import AIDER_APP_NAME, AIDER_SITE_URL, litellm
 
 DEFAULT_MODEL_NAME = "gpt-4o"
-ANTHROPIC_BETA_HEADER = "max-tokens-3-5-sonnet-2024-07-15, prompt-caching-2024-07-31"
+ANTHROPIC_BETA_HEADER = "max-tokens-3-5-sonnet-2024-07-15,prompt-caching-2024-07-31"
 
 OPENAI_MODELS = """
 gpt-4
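
A minimal, illustrative Python sketch (not part of the patch) of the message transformation the updated ChatChunks cache-control code performs: a plain string content entry is rewritten as a one-element list of text blocks carrying an ephemeral cache_control marker. The helper name and the sample messages are hypothetical; only the wrapping logic mirrors the patched code.

# Illustrative sketch of the cache-control wrapping applied in the patch above.
# The helper name and the sample messages are hypothetical; the wrapping logic
# mirrors the patched ChatChunks code.


def add_ephemeral_cache_control(messages):
    """Mark the last message as a cacheable prefix for Anthropic prompt caching."""
    if not messages:
        return

    content = messages[-1]["content"]
    if type(content) is str:
        # Promote the plain string to a text content block so it can carry metadata.
        content = dict(
            type="text",
            text=content,
        )
    content["cache_control"] = {"type": "ephemeral"}

    # Key fix in the patch: write the block back as a one-element list,
    # the shape the prompt-caching beta expects for per-block cache_control.
    messages[-1]["content"] = [content]


if __name__ == "__main__":
    messages = [{"role": "system", "content": "You are a careful coding assistant."}]
    add_ephemeral_cache_control(messages)
    print(messages[-1]["content"])
    # [{'type': 'text', 'text': '...', 'cache_control': {'type': 'ephemeral'}}]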