refactor: Move LOCAL_MODEL_METADATA to ModelInfoManager as an instance attribute

Paul Gauthier (aider) 2025-02-04 16:28:10 -08:00
parent b5cfceeed6
commit c1627612cf


@@ -85,8 +85,7 @@ MODEL_ALIASES = {
     "r1": "deepseek/deepseek-reasoner",
     "flash": "gemini/gemini-2.0-flash-exp",
 }
-# Model metadata loaded from metadata files (aider/resources/ and user's)
-LOCAL_MODEL_METADATA = dict()
+# Model metadata loaded from resources and user's files.
 
 @dataclass
@@ -130,6 +129,7 @@ class ModelInfoManager:
         self.cache_dir = Path.home() / ".aider" / "caches"
         self.cache_file = self.cache_dir / "model_prices_and_context_window.json"
         self.content = None
+        self.local_model_metadata = {}
         self._load_cache()
 
     def _load_cache(self):
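
For orientation, a minimal sketch of the shape these two hunks leave behind: the module-level LOCAL_MODEL_METADATA dict becomes per-instance state on ModelInfoManager, with a single shared model_info_manager instance standing in for the old global. This is simplified; the real __init__ also calls _load_cache() to populate the litellm price/context cache.

    from pathlib import Path

    class ModelInfoManager:
        def __init__(self):
            self.cache_dir = Path.home() / ".aider" / "caches"
            self.cache_file = self.cache_dir / "model_prices_and_context_window.json"
            self.content = None
            # Formerly the module-level LOCAL_MODEL_METADATA global.
            self.local_model_metadata = {}
            # self._load_cache() is omitted in this sketch.

    # One shared instance replaces the old global dict (see the later hunks).
    model_info_manager = ModelInfoManager()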
@@ -162,7 +162,7 @@ class ModelInfoManager:
             pass
 
     def get_model_from_cached_json_db(self, model):
-        data = LOCAL_MODEL_METADATA.get(model)
+        data = self.local_model_metadata.get(model)
         if data:
             return data
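
Read as a sketch, this hunk fixes the lookup order: metadata registered on the instance wins, and only on a miss does the manager fall back to its cached litellm JSON. Everything past the first three lines below is an assumption for illustration; the real method also handles cache refresh and model-name matching.

    def get_model_from_cached_json_db(self, model):
        data = self.local_model_metadata.get(model)
        if data:
            return data

        # Assumed fallback for illustration: consult the cached litellm JSON
        # (self.content) loaded by _load_cache(); return an empty dict on a miss.
        if self.content and model in self.content:
            return self.content[model]
        return dict()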
@@ -689,7 +689,7 @@ def register_litellm_models(model_fnames):
                 continue
 
             # Defer registration with litellm to faster path.
-            LOCAL_MODEL_METADATA.update(model_def)
+            model_info_manager.local_model_metadata.update(model_def)
         except Exception as e:
             raise Exception(f"Error loading model definition from {model_fname}: {e}")