Paul Gauthier 2023-06-07 12:29:51 -07:00
parent 9cef379abd
commit a874d617d3
4 changed files with 15 additions and 15 deletions

View file

@@ -35,12 +35,12 @@ class Coder:
     def check_model_availability(self, main_model):
         available_models = openai.Model.list()
         model_ids = [model.id for model in available_models["data"]]
-        return main_model.value in model_ids
+        return main_model.name in model_ids

     def __init__(
         self,
         io,
-        main_model=models.GPT4.value,
+        main_model=models.GPT4.name,
         fnames=None,
         pretty=True,
         show_diffs=False,
@@ -84,7 +84,7 @@ class Coder:
         self.main_model = main_model
         if main_model == models.GPT35:
             self.io.tool_output(
-                f"Using {main_model.value} (experimental): disabling ctags/repo-maps.",
+                f"Using {main_model.name} (experimental): disabling ctags/repo-maps.",
             )
             self.gpt_prompts = prompts.GPT35()
         else:
@@ -435,7 +435,7 @@ class Coder:

     def send(self, messages, model=None, silent=False):
         if not model:
-            model = self.main_model.value
+            model = self.main_model.name

         self.resp = ""
         interrupted = False
@@ -620,7 +620,7 @@ class Coder:
     def get_commit_message(self, diffs, context):
         if len(diffs) >= 4 * 1024 * 4:
             self.io.tool_error(
-                f"Diff is too large for {models.GPT35.value} to generate a commit message."
+                f"Diff is too large for {models.GPT35.name} to generate a commit message."
             )
             return

@@ -634,12 +634,12 @@ class Coder:
         try:
             commit_message, interrupted = self.send(
                 messages,
-                model=models.GPT35.value,
+                model=models.GPT35.name,
                 silent=True,
             )
         except openai.error.InvalidRequestError:
             self.io.tool_error(
-                f"Failed to generate commit message using {models.GPT35.value} due to an invalid"
+                f"Failed to generate commit message using {models.GPT35.name} due to an invalid"
                 " request."
             )
             return
@@ -650,7 +650,7 @@ class Coder:

         if interrupted:
             self.io.tool_error(
-                f"Unable to get commit message from {models.GPT35.value}. Use /commit to try again."
+                f"Unable to get commit message from {models.GPT35.name}. Use /commit to try again."
             )
             return

@@ -780,7 +780,7 @@ class Coder:
         elif self.main_model == models.GPT35:
             method = self.update_files_gpt35
         else:
-            raise ValueError(f"apply_updates() doesn't support {self.main_model.value}")
+            raise ValueError(f"apply_updates() doesn't support {self.main_model.name}")

         try:
             edited = method(content)

View file

@@ -15,7 +15,7 @@ class Commands:
     def __init__(self, io, coder):
         self.io = io
         self.coder = coder
-        self.tokenizer = tiktoken.encoding_for_model(coder.main_model.value)
+        self.tokenizer = tiktoken.encoding_for_model(coder.main_model.name)

     def is_command(self, inp):
         if inp[0] == "/":

View file

@@ -76,15 +76,15 @@ def main(args=None, input=None, output=None):
     parser.add_argument(
         "--model",
         metavar="MODEL",
-        default=models.GPT4.value,
-        help=f"Specify the model to use for the main chat (default: {models.GPT4.value})",
+        default=models.GPT4.name,
+        help=f"Specify the model to use for the main chat (default: {models.GPT4.name})",
     )
     parser.add_argument(
         "-3",
         action="store_const",
         dest="model",
-        const=models.GPT35.value,
-        help=f"Use {models.GPT35.value} model for the main chat (not advised)",
+        const=models.GPT35.name,
+        help=f"Use {models.GPT35.name} model for the main chat (not advised)",
     )
     parser.add_argument(
         "--pretty",

View file

@@ -88,7 +88,7 @@ class RepoMap:
         else:
             self.has_ctags = False

-        self.tokenizer = tiktoken.encoding_for_model(main_model.value)
+        self.tokenizer = tiktoken.encoding_for_model(main_model.name)
         self.repo_content_prefix = repo_content_prefix

     def get_repo_map(self, chat_files, other_files):
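
Every hunk in this commit swaps `.value` for `.name`, which suggests the `models` module moved from Enum-style members (where the OpenAI model id string lived in `.value`) to model objects that expose that string as a `.name` attribute. The diff only confirms that `models.GPT4` and `models.GPT35` have a `name` attribute, compare with `==`, and that `name` is passed wherever an OpenAI model id is expected (including `tiktoken.encoding_for_model`). A minimal sketch of a module with that shape, with the concrete id strings and the dataclass layout as assumptions rather than facts from this diff, might look like:

# models_sketch.py -- hypothetical sketch, not the repository's actual models module.
# Only the `name` attribute is confirmed by the diff above; the dataclass form
# and the example model id strings are illustrative assumptions.
from dataclasses import dataclass

import tiktoken


@dataclass(frozen=True)
class Model:
    name: str  # OpenAI model id string passed to the API and to tiktoken


GPT4 = Model(name="gpt-4")
GPT35 = Model(name="gpt-3.5-turbo")


if __name__ == "__main__":
    # The diff uses main_model.name wherever a plain model id string is needed,
    # e.g. tiktoken.encoding_for_model(main_model.name).
    enc = tiktoken.encoding_for_model(GPT35.name)
    print(GPT4.name, GPT35.name, len(enc.encode("hello world")))

Under this reading, code that previously did `main_model.value in model_ids` keeps working with `main_model.name in model_ids`, and equality checks like `main_model == models.GPT35` still hold because the objects compare by value.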