add support to load/override model settings

Krazer 2024-06-11 17:15:48 -05:00
parent 6216a273fe
commit 486c5ce65c
5 changed files with 131 additions and 36 deletions


@@ -141,9 +141,15 @@ def get_parser(default_config_files, git_root):
         env_var="OPENAI_ORGANIZATION_ID",
         help="Specify the OpenAI organization ID",
     )
+    group.add_argument(
+        "--model-settings-file",
+        metavar="MODEL_SETTINGS_FILE",
+        default=None,
+        help="Specify a file with aider model settings for unknown models",
+    )
     group.add_argument(
         "--model-metadata-file",
-        metavar="MODEL_FILE",
+        metavar="MODEL_METADATA_FILE",
         default=None,
         help="Specify a file with context window and costs for unknown models",
     )
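
For context, the new --model-settings-file option points at a YAML list of per-model setting overrides, which models.register_models() (changed further down in this commit) turns into ModelSettings objects. A minimal sketch of such a file, wrapped in Python so it can be parsed the same way it is in the commit; the model name is hypothetical and, apart from name, the field names are assumptions about what ModelSettings accepts:

# Sketch of a settings file that --model-settings-file could point at.
# The model name is made up; "edit_format" is an assumed ModelSettings field.
import yaml

settings_yaml = """
- name: example-provider/example-model
  edit_format: whole
"""

for entry in yaml.safe_load(settings_yaml):
    print(entry)  # register_models() passes each dict to ModelSettings(**entry)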


@@ -205,7 +205,48 @@ def parse_lint_cmds(lint_cmds, io):
             return
     return res
 
 
+def generate_search_path_list(default_fname, git_root, command_line_file):
+    files = []
+    default_file = Path(default_fname)
+    files.append(Path.home() / default_file)  # homedir
+    if git_root:
+        files.append(Path(git_root) / default_file)  # git root
+    if command_line_file:
+        files.append(command_line_file)
+    files.append(default_file.resolve())
+    files = list(map(str, files))
+    files = list(dict.fromkeys(files))
+    return files
+
+
+def register_models(git_root, model_settings_fname, io):
+    model_settings_files = generate_search_path_list(".aider.models.yml", git_root, model_settings_fname)
+    try:
+        files_loaded = models.register_models(model_settings_files)
+        if len(files_loaded) > 0:
+            io.tool_output(f"Loaded {len(files_loaded)} model settings file(s)")
+            for file_loaded in files_loaded:
+                io.tool_output(f" - {file_loaded}")
+    except Exception as e:
+        io.tool_error(f"Error loading aider model settings: {e}")
+        return 1
+    return None
+
+
+def register_litellm_models(git_root, model_metadata_fname, io):
+    model_metadata_files = generate_search_path_list(".aider.litellm.models.json", git_root, model_metadata_fname)
+    try:
+        model_metadata_files_loaded = models.register_litellm_models(model_metadata_files)
+        if len(model_metadata_files_loaded) > 0:
+            io.tool_output(f"Loaded {len(model_metadata_files_loaded)} litellm model file(s)")
+            for model_metadata_file in model_metadata_files_loaded:
+                io.tool_output(f" - {model_metadata_file}")
+    except Exception as e:
+        io.tool_error(f"Error loading litellm models: {e}")
+        return 1
+
+
 def main(argv=None, input=None, output=None, force_git_root=None, return_coder=False):
     if argv is None:
         argv = sys.argv[1:]
@@ -336,26 +377,9 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=False):
     if args.openai_organization_id:
         os.environ["OPENAI_ORGANIZATION"] = args.openai_organization_id
 
-    model_def_files = []
-    model_def_fname = Path(".aider.models.json")
-    model_def_files.append(Path.home() / model_def_fname)  # homedir
-    if git_root:
-        model_def_files.append(Path(git_root) / model_def_fname)  # git root
-    if args.model_metadata_file:
-        model_def_files.append(args.model_metadata_file)
-    model_def_files.append(model_def_fname.resolve())
-    model_def_files = list(map(str, model_def_files))
-    model_def_files = list(dict.fromkeys(model_def_files))
-    try:
-        model_metadata_files_loaded = models.register_models(model_def_files)
-        if len(model_metadata_files_loaded) > 0:
-            io.tool_output(f"Loaded {len(model_metadata_files_loaded)} model file(s)")
-            for model_metadata_file in model_metadata_files_loaded:
-                io.tool_output(f" - {model_metadata_file}")
-    except Exception as e:
-        io.tool_error(f"Error loading model info/cost: {e}")
-        return 1
+    register_models(git_root, args.model_settings_file, io)
+    register_litellm_models(git_root, args.model_metadata_file, io)
 
     main_model = models.Model(args.model, weak_model=args.weak_model)
 
     lint_cmds = parse_lint_cmds(args.lint_cmd, io)
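
The search order produced by generate_search_path_list is: home directory, git root, any explicitly passed file, then the default filename resolved against the current directory, with duplicates dropped while preserving first-seen order. A rough usage sketch; the aider.main import path and the example paths are assumptions:

# Illustrative only: the paths below are hypothetical.
from aider.main import generate_search_path_list  # assumed import location

files = generate_search_path_list(
    ".aider.models.yml",
    git_root="/path/to/repo",        # hypothetical git root
    command_line_file="custom.yml",  # e.g. the value of --model-settings-file
)

# Expected result, in order and deduplicated:
#   ~/.aider.models.yml
#   /path/to/repo/.aider.models.yml
#   custom.yml
#   ./.aider.models.yml resolved to an absolute path
print(files)

Since register_models applies the files in list order and replaces any existing entry with the same model name, settings from files later in the list take precedence.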


@@ -1,5 +1,6 @@
 import difflib
 import json
+import yaml
 import math
 import os
 import sys
@@ -425,23 +426,47 @@ class Model:
             return validate_variables(["GROQ_API_KEY"])
 
         return res
 
 
-def register_models(model_def_fnames):
-    model_metadata_files_loaded = []
-    for model_def_fname in model_def_fnames:
-        if not os.path.exists(model_def_fname):
+def register_models(model_settings_fnames):
+    files_loaded = []
+    for model_settings_fname in model_settings_fnames:
+        if not os.path.exists(model_settings_fname):
             continue
-        model_metadata_files_loaded.append(model_def_fname)
         try:
-            with open(model_def_fname, "r") as model_def_file:
+            with open(model_settings_fname, "r") as model_settings_file:
+                model_settings_list = yaml.safe_load(model_settings_file)
+            for model_settings_dict in model_settings_list:
+                model_settings = ModelSettings(**model_settings_dict)
+                existing_model_settings = next((ms for ms in MODEL_SETTINGS if ms.name == model_settings.name), None)
+                if existing_model_settings:
+                    MODEL_SETTINGS.remove(existing_model_settings)
+                MODEL_SETTINGS.append(model_settings)
+        except Exception as e:
+            raise Exception(f"Error loading model settings from {model_settings_fname}: {e}")
+        files_loaded.append(model_settings_fname)
+
+    return files_loaded
+
+
+def register_litellm_models(model_fnames):
+    files_loaded = []
+    for model_fname in model_fnames:
+        if not os.path.exists(model_fname):
+            continue
+        try:
+            with open(model_fname, "r") as model_def_file:
                 model_def = json.load(model_def_file)
-        except json.JSONDecodeError as e:
-            raise Exception(f"Error loading model definition from {model_def_fname}: {e}")
-        litellm.register_model(model_def)
+        except Exception as e:
+            raise Exception(f"Error loading model definition from {model_fname}: {e}")
+        files_loaded.append(model_fname)
+        litellm.register_model(model_def)
 
-    return model_metadata_files_loaded
+    return files_loaded
 
 
 def validate_variables(vars):
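
For the litellm side, --model-metadata-file (or a .aider.litellm.models.json found on the same search path) holds JSON that is handed directly to litellm.register_model(): a mapping from model name to metadata. A minimal sketch; the metadata keys mirror litellm's model-cost format and the model name, token limit, and costs are made-up placeholders:

# Sketch of a hypothetical .aider.litellm.models.json payload.
import json

import litellm

model_def = json.loads(
    """
    {
      "example-provider/example-model": {
        "max_tokens": 8192,
        "input_cost_per_token": 1e-06,
        "output_cost_per_token": 2e-06,
        "litellm_provider": "openai",
        "mode": "chat"
      }
    }
    """
)

# register_litellm_models() above does effectively this for each file it finds.
litellm.register_model(model_def)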