Mirror of https://github.com/Aider-AI/aider.git, synced 2025-05-24 22:34:59 +00:00
Added can_prefill metadata to the anthropic models

commit e3805350c9 (parent dac12e342b)
2 changed files with 27 additions and 8 deletions
aider/coders/base_coder.py

@@ -821,20 +821,20 @@ class Coder:
             except KeyboardInterrupt:
                 interrupted = True
                 break
-            except litellm.ContextWindowExceededError as cwe_err:
-                # the input is overflowing the context window
+            except litellm.ContextWindowExceededError:
+                # The input is overflowing the context window!
                 exhausted = True
-                dump(cwe_err)
                 break
             except litellm.exceptions.BadRequestError as br_err:
-                dump(br_err)
                 self.io.tool_error(f"BadRequestError: {br_err}")
                 return
-            except FinishReasonLength as frl_err:
-                # finish_reason=length means 4k output limit?
-                dump(frl_err)
-                # exhausted = True
+            except FinishReasonLength:
+                # We hit the 4k output limit!
+                if not self.main_model.can_prefill:
+                    exhausted = True
+                    break

+                # Use prefill to continue the response
                 multi_response_content += self.partial_response_content
                 if messages[-1]["role"] == "assistant":
                     messages[-1]["content"] = multi_response_content
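The substantive change in this hunk is the FinishReasonLength branch: when a reply is truncated at the output-token limit and the main model supports prefill, aider keeps the partial reply as a trailing assistant message and asks the model to continue it, instead of giving up; models without prefill support now just set exhausted and break. Below is a minimal sketch of that continuation pattern, assuming a litellm-style completion call; the helper name and the loop around it are illustrative, not aider's actual code.

    # Minimal sketch of assistant-message "prefill" continuation, assuming a
    # litellm-style chat completion call; resume_with_prefill is an
    # illustrative helper name, not part of aider.
    import litellm

    def resume_with_prefill(model_name, messages, partial_reply):
        # Anthropic models treat a trailing {"role": "assistant"} message as a
        # prefill: the next completion continues from the end of that text
        # instead of answering again from scratch.
        if messages and messages[-1]["role"] == "assistant":
            messages[-1]["content"] = partial_reply
        else:
            messages.append({"role": "assistant", "content": partial_reply})

        resp = litellm.completion(model=model_name, messages=messages)
        # A caller would keep looping while finish_reason == "length",
        # folding each continuation back into partial_reply.
        return partial_reply + resp.choices[0].message.content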
aider/models.py

@@ -167,6 +167,7 @@ MODEL_SETTINGS = [
         weak_model_name="claude-3-haiku-20240307",
         use_repo_map=True,
         send_undo_reply=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "openrouter/anthropic/claude-3-opus",
@@ -174,6 +175,7 @@ MODEL_SETTINGS = [
         weak_model_name="openrouter/anthropic/claude-3-haiku",
         use_repo_map=True,
         send_undo_reply=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "claude-3-sonnet-20240229",
@@ -187,6 +189,7 @@ MODEL_SETTINGS = [
         weak_model_name="claude-3-haiku-20240307",
         use_repo_map=True,
         examples_as_sys_msg=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "anthropic/claude-3-5-sonnet-20240620",
@@ -194,6 +197,7 @@ MODEL_SETTINGS = [
         weak_model_name="claude-3-haiku-20240307",
         use_repo_map=True,
         examples_as_sys_msg=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "openrouter/anthropic/claude-3.5-sonnet",
@@ -201,6 +205,7 @@ MODEL_SETTINGS = [
         weak_model_name="openrouter/anthropic/claude-3-haiku-20240307",
         use_repo_map=True,
         examples_as_sys_msg=True,
+        can_prefill=True,
     ),
     # Vertex AI Claude models
     ModelSettings(
@@ -208,6 +213,8 @@ MODEL_SETTINGS = [
         "diff",
         weak_model_name="vertex_ai/claude-3-haiku@20240307",
         use_repo_map=True,
+        examples_as_sys_msg=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "vertex_ai/claude-3-opus@20240229",
@@ -215,11 +222,13 @@ MODEL_SETTINGS = [
         weak_model_name="vertex_ai/claude-3-haiku@20240307",
         use_repo_map=True,
         send_undo_reply=True,
+        can_prefill=True,
     ),
     ModelSettings(
         "vertex_ai/claude-3-sonnet@20240229",
         "whole",
         weak_model_name="vertex_ai/claude-3-haiku@20240307",
+        can_prefill=True,
     ),
     # Cohere
     ModelSettings(
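Each of these hunks flips a single can_prefill flag on an Anthropic entry (the Vertex AI 3.5 Sonnet entry also gains examples_as_sys_msg). For orientation, here is a hedged sketch of what one such entry looks like as a whole; aider's real ModelSettings dataclass carries more fields, the defaults shown are assumptions, and the entry name is only inferred from the context lines of the first hunk. The same file's Model class also picks up a name-based fallback, shown in the next hunk.

    # Illustrative sketch only: not the real ModelSettings definition.
    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class ModelSettings:
        name: str
        edit_format: str = "whole"
        weak_model_name: Optional[str] = None
        use_repo_map: bool = False
        send_undo_reply: bool = False
        examples_as_sys_msg: bool = False
        can_prefill: bool = False  # the flag added by this commit

    # One entry with the new flag, assuming the first hunk above belongs to
    # the claude-3-opus-20240229 settings:
    opus = ModelSettings(
        "claude-3-opus-20240229",
        "diff",
        weak_model_name="claude-3-haiku-20240307",
        use_repo_map=True,
        send_undo_reply=True,
        can_prefill=True,
    )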
aider/models.py

@@ -377,6 +386,16 @@ class Model:
         if "gpt-3.5" in model or "gpt-4" in model:
             self.reminder_as_sys_msg = True

+        if "anthropic" in model:
+            self.can_prefill = True
+
+        if "3.5-sonnet" in model or "3-5-sonnet" in model:
+            self.edit_format = "diff"
+            self.use_repo_map = True
+            self.examples_as_sys_msg = True
+            self.can_prefill = (True,)
+            self.can_prefill = True
+
         # use the defaults
         if self.edit_format == "diff":
             self.use_repo_map = True
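This fallback means models that are missing from MODEL_SETTINGS still get prefill enabled whenever the name contains "anthropic", and any 3.5 Sonnet name additionally forces diff edits, repo-map use, and system-message examples. The duplicated assignment in that branch is harmless: self.can_prefill = (True,) is immediately overwritten by the plain boolean on the next line. A rough usage sketch follows; the Model constructor signature is assumed from context and the printed values are illustrative, not captured output.

    # Rough usage sketch (assumed constructor usage; output illustrative).
    from aider.models import Model

    for name in (
        "claude-3-opus-20240229",                # explicit MODEL_SETTINGS entry
        "anthropic/claude-3-5-sonnet-20240620",  # also hits the name-based fallback
    ):
        m = Model(name)
        print(name, m.edit_format, m.can_prefill)
    # After this commit both would report can_prefill=True.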