diff --git a/aider/history.py b/aider/history.py index 9817a85cb..6aef9a701 100644 --- a/aider/history.py +++ b/aider/history.py @@ -123,7 +123,7 @@ def main(): assistant.append(line) - summarizer = ChatSummary(models.Model(models.DEFAULT_WEAK_MODEL_NAME)) + summarizer = ChatSummary(models.Model(models.DEFAULT_WEAK_MODEL_NAME, weak_model=False)) summary = summarizer.summarize(messages[-40:]) dump(summary) diff --git a/aider/models.py b/aider/models.py index 406c77cf7..fa3ca5224 100644 --- a/aider/models.py +++ b/aider/models.py @@ -139,7 +139,10 @@ class Model: self.max_chat_history_tokens = 2 * 1024 self.configure_model_settings(model) - self.get_weak_model(weak_model, require_model_info) + if weak_model is False: + self.weak_model_name = None + else: + self.get_weak_model(weak_model, require_model_info) def configure_model_settings(self, model): for ms in MODEL_SETTINGS: @@ -174,7 +177,7 @@ class Model: self.weak_model = Model( self.weak_model_name, - weak_model=self.weak_model_name, + weak_model=False, require_model_info=require_model_info, ) return self.weak_model diff --git a/aider/repo.py b/aider/repo.py index bc76e6d0d..682810b7d 100644 --- a/aider/repo.py +++ b/aider/repo.py @@ -25,7 +25,7 @@ class GitRepo: self.models = [ Model( DEFAULT_WEAK_MODEL_NAME, - weak_model=False, + weak_model=False, require_model_info=False, ) ] diff --git a/docs/connect.md b/docs/connect.md index dc9b9d106..c425f08b1 100644 --- a/docs/connect.md +++ b/docs/connect.md @@ -91,6 +91,11 @@ for more information on how to populate the above configuration values. If you can make an LLM accessible via an OpenAI compatible API, you can use `--openai-api-base` to connect to a different API endpoint. +You might need to use `--no-require-model-info` to tell aider to +work with an unknown model that has no available metadata, such as +context size, token costs, etc. +Some minor functionality will be limited when using such models. 
+ ## Other LLMs Aider uses the [litellm](https://docs.litellm.ai/docs/providers) package