removed skip-model-avail-check

Paul Gauthier 2024-04-17 15:02:39 -07:00
parent 8e0f291a16
commit 855e787175
3 changed files with 6 additions and 37 deletions

View file

@@ -63,7 +63,6 @@ class Coder:
         edit_format=None,
         io=None,
         client=None,
-        skip_model_availabily_check=False,
         **kwargs,
     ):
         from . import EditBlockCoder, UnifiedDiffCoder, WholeFileCoder
@@ -71,15 +70,6 @@ class Coder:
         if not main_model:
             main_model = models.Model.create(models.DEFAULT_MODEL_NAME)
-        if not skip_model_availabily_check and not main_model.always_available:
-            if not check_model_availability(io, client, main_model):
-                fallback_model = models.GPT35_0125
-                io.tool_error(
-                    f"API key does not support {main_model.name}, falling back to"
-                    f" {fallback_model.name}"
-                )
-                main_model = fallback_model
         if edit_format is None:
             edit_format = main_model.edit_format
@@ -1052,21 +1042,3 @@ class Coder:
         # files changed, move cur messages back behind the files messages
         # self.move_back_cur_messages(self.gpt_prompts.files_content_local_edits)
         return True
-def check_model_availability(io, client, main_model):
-    try:
-        available_models = client.models.list()
-    except openai.NotFoundError:
-        # Azure sometimes returns 404?
-        # https://discord.com/channels/1131200896827654144/1182327371232186459
-        io.tool_error(f"Unable to list available models, proceeding with {main_model.name}")
-        return True
-    model_ids = sorted(model.id for model in available_models)
-    if main_model.name in model_ids:
-        return True
-    available_models = ", ".join(model_ids)
-    io.tool_error(f"API key supports: {available_models}")
-    return False
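
With the availability probe and its GPT-3.5 fallback removed, Coder.create now uses whatever model the caller hands it. A minimal sketch of the resulting call path, based only on the calls visible in this diff (the InputOutput import path and the model name are assumptions for illustration):

    # Sketch only: Coder.create no longer probes client.models.list() or falls back.
    from aider import models
    from aider.coders import Coder
    from aider.io import InputOutput  # assumed location of aider's IO helper

    io = InputOutput()
    main_model = models.Model.create("gpt-4-0613")  # used as-is, no availability check
    coder = Coder.create(main_model=main_model, edit_format=None, io=io, client=None)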

View file

@@ -170,13 +170,6 @@ def main(argv=None, input=None, output=None, force_git_root=None):
         default=default_model,
         help=f"Specify the model to use for the main chat (default: {default_model})",
     )
-    core_group.add_argument(
-        "--skip-model-availability-check",
-        metavar="SKIP_MODEL_AVAILABILITY_CHECK",
-        action=argparse.BooleanOptionalAction,
-        default=False,
-        help="Override to skip model availability check (default: False)",
-    )
     default_4_model = "gpt-4-0613"
     core_group.add_argument(
         "--4",
@@ -576,12 +569,16 @@ def main(argv=None, input=None, output=None, force_git_root=None):
         os.environ["OPENAI_ORGANIZATION"] = args.openai_organization_id
     res = litellm.validate_environment(args.model)
     missing_keys = res.get("missing_keys")
     if missing_keys:
         io.tool_error(f"To use model {args.model}, please set these environment variables:")
         for key in missing_keys:
             io.tool_error(f"- {key}")
         return 1
+    elif not res["keys_in_environment"]:
+        io.tool_error(f"Unknown model {args.model}.")
+        return 1
     main_model = models.Model.create(args.model, None)
@@ -590,7 +587,6 @@ def main(argv=None, input=None, output=None, force_git_root=None):
         main_model=main_model,
         edit_format=args.edit_format,
         io=io,
-        skip_model_availabily_check=args.skip_model_availability_check,
         client=None,
         ##
         fnames=fnames,
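
The new elif branch distinguishes two outcomes of litellm.validate_environment: a model whose provider keys are simply missing from the environment, and a model name litellm does not recognize at all. A standalone sketch of that flow, with print standing in for io.tool_error and the return codes kept illustrative:

    import litellm

    def check_model_env(model_name):
        # Mirrors the branch added above, outside of aider's main().
        res = litellm.validate_environment(model_name)
        missing_keys = res.get("missing_keys")
        if missing_keys:
            # Known provider, but its API key variables are unset.
            print(f"To use model {model_name}, please set these environment variables:")
            for key in missing_keys:
                print(f"- {key}")
            return 1
        elif not res["keys_in_environment"]:
            # litellm could not map the name to any provider it knows.
            print(f"Unknown model {model_name}.")
            return 1
        return 0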

View file

@@ -133,7 +133,8 @@ class OpenAIModel(Model):
         try:
             self.tokenizer = tiktoken.encoding_for_model(true_name)
         except KeyError:
-            raise ValueError(f"No known tokenizer for model: {name}")
+            self.tokenizer = None
+            # raise ValueError(f"No known tokenizer for model: {name}")
         model_info = self.lookup_model_info(true_name)
         if not model_info:
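
Since the tokenizer lookup can now leave self.tokenizer as None instead of raising, any token counting that uses it presumably needs a guard. A hypothetical example of such a guard, not taken from this commit (the four-characters-per-token fallback is an assumption):

    def token_count(self, text):
        # Hypothetical guard, not from this commit: tolerate self.tokenizer being None.
        if self.tokenizer is None:
            # Rough assumption: ~4 characters per token when no tokenizer is known.
            return max(1, len(text) // 4)
        return len(self.tokenizer.encode(text))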