Mirror of https://github.com/Aider-AI/aider.git, synced 2025-06-01 02:05:00 +00:00
fix: Use temperature setting from model configuration

commit 1755d2e0f4 (parent 8aee4d25ed)
3 changed files with 11 additions and 2 deletions

@@ -1381,6 +1381,11 @@ class Coder:
 
         self.io.log_llm_history("TO LLM", format_messages(messages))
 
+        if self.main_model.use_temperature:
+            temp = self.temperature
+        else:
+            temp = None
+
         completion = None
         try:
             hash_object, completion = send_completion(
@@ -1388,7 +1393,7 @@ class Coder:
                 messages,
                 functions,
                 self.stream,
-                self.temperature,
+                temp,
                 extra_headers=model.extra_headers,
                 max_tokens=model.max_tokens,
             )
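For orientation, here is a minimal sketch of the gating the hunks above introduce, using simplified stand-in classes rather than aider's real Coder and Model objects: the caller forwards its configured temperature only when the model's settings allow it, and passes None otherwise.

# Simplified stand-ins, not aider's actual classes.
from dataclasses import dataclass
from typing import Optional


@dataclass
class StubModel:
    name: str
    use_temperature: bool = True  # mirrors the new ModelSettings flag


def pick_temperature(model: StubModel, configured: float) -> Optional[float]:
    # Forward the configured temperature only for models that accept it;
    # returning None tells the sender to omit the parameter entirely.
    return configured if model.use_temperature else None


print(pick_temperature(StubModel("gpt-4o"), 0.0))          # -> 0.0
print(pick_temperature(StubModel("o1-mini", False), 0.0))  # -> None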
@@ -78,6 +78,7 @@ class ModelSettings:
     cache_control: bool = False
     caches_by_default: bool = False
     use_system_prompt: bool = True
+    use_temperature: bool = True
 
 
 # https://platform.openai.com/docs/models/gpt-4-and-gpt-4-turbo
@@ -433,6 +434,7 @@ MODEL_SETTINGS = [
         use_repo_map=True,
         reminder="user",
         use_system_prompt=False,
+        use_temperature=False,
     ),
     ModelSettings(
         "o1-mini",
@@ -441,6 +443,7 @@ MODEL_SETTINGS = [
         use_repo_map=True,
         reminder="user",
         use_system_prompt=False,
+        use_temperature=False,
     ),
 ]
 
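Because use_temperature defaults to True in the dataclass, existing entries keep their old behavior and only models that reject the parameter need an explicit override (o1-mini is visible in the hunk above). A rough sketch of that defaults-plus-override pattern, with a hypothetical registry in place of aider's full MODEL_SETTINGS table:

from dataclasses import dataclass


@dataclass
class StubSettings:
    # Trimmed-down stand-in for aider's ModelSettings.
    name: str
    use_system_prompt: bool = True
    use_temperature: bool = True  # new flag; the default preserves old behavior


# Hypothetical registry: only the exceptions override the default.
STUB_SETTINGS = [
    StubSettings("gpt-4o"),
    StubSettings("o1-preview", use_system_prompt=False, use_temperature=False),
    StubSettings("o1-mini", use_system_prompt=False, use_temperature=False),
]

for s in STUB_SETTINGS:
    print(s.name, s.use_temperature)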
@@ -60,9 +60,10 @@ def send_completion(
     kwargs = dict(
         model=model_name,
         messages=messages,
-        # temperature=temperature,
         stream=stream,
     )
+    if temperature is not None:
+        kwargs["temperature"] = temperature
 
     if functions is not None:
         function = functions[0]
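The point of the "if temperature is not None" check is that, when no value was chosen, the key is left out of the request entirely instead of being forwarded, which matters for models like the o1 family that do not accept a temperature parameter. A rough sketch of that conditional-kwargs pattern; build_request and fake_completion below are placeholders, not aider's send_completion or the underlying completion API:

from typing import Optional


def fake_completion(**kwargs):
    # Placeholder for the real completion call; it just echoes what would be sent.
    return kwargs


def build_request(model: str, messages: list, stream: bool,
                  temperature: Optional[float]) -> dict:
    kwargs = dict(model=model, messages=messages, stream=stream)
    if temperature is not None:
        # Include the key only when a temperature was actually chosen.
        kwargs["temperature"] = temperature
    return fake_completion(**kwargs)


print(build_request("o1-mini", [], stream=False, temperature=None))  # no "temperature" key
print(build_request("gpt-4o", [], stream=False, temperature=0.0))    # temperature included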