Support weak_model=False

This commit is contained in:
Paul Gauthier 2024-04-19 14:04:10 -07:00
parent f81b62dfea
commit 4e50f0d095
4 changed files with 12 additions and 4 deletions

View file

@@ -123,7 +123,7 @@ def main():
assistant.append(line)
summarizer = ChatSummary(models.Model(models.DEFAULT_WEAK_MODEL_NAME))
summarizer = ChatSummary(models.Model(models.DEFAULT_WEAK_MODEL_NAME, weak_model=False))
summary = summarizer.summarize(messages[-40:])
dump(summary)

View file

@@ -139,7 +139,10 @@ class Model:
self.max_chat_history_tokens = 2 * 1024
self.configure_model_settings(model)
self.get_weak_model(weak_model, require_model_info)
if weak_model is False:
self.weak_model_name = None
else:
self.get_weak_model(weak_model, require_model_info)
def configure_model_settings(self, model):
for ms in MODEL_SETTINGS:
@@ -174,7 +177,7 @@ class Model:
self.weak_model = Model(
self.weak_model_name,
weak_model=self.weak_model_name,
weak_model=False,
require_model_info=require_model_info,
)
return self.weak_model

View file

@@ -25,7 +25,7 @@ class GitRepo:
self.models = [
Model(
DEFAULT_WEAK_MODEL_NAME,
weak_model=DEFAULT_WEAK_MODEL_NAME,
weak_model=False,
require_model_info=False,
)
]

View file

@@ -91,6 +91,11 @@ for more information on how to populate the above configuration values.
If you can make an LLM accessible via an OpenAI compatible API,
you can use `--openai-api-base` to connect to a different API endpoint.
You might need to use `--no-require-model-info` to tell aider to
work with an unknown model that has no metadata available, such as
context size, token costs, etc.
Some minor functionality will be limited when using such models.
## Other LLMs
Aider uses the [litellm](https://docs.litellm.ai/docs/providers) package