Use the Models enum instead of .value

This commit is contained in:
Paul Gauthier 2023-06-05 17:06:34 -07:00
parent 130f73c864
commit fcd8a6df73
3 changed files with 16 additions and 15 deletions

View file

@@ -72,13 +72,14 @@ class Coder:
else: else:
self.console = Console(force_terminal=True, no_color=True) self.console = Console(force_terminal=True, no_color=True)
main_model = Models(main_model)
if not self.check_model_availability(main_model): if not self.check_model_availability(main_model):
main_model = Models.GPT35.value main_model = Models.GPT35
self.main_model = main_model self.main_model = main_model
if main_model == Models.GPT35.value: if main_model == Models.GPT35:
self.io.tool_output( self.io.tool_output(
f"Using {main_model}: showing diffs and disabling ctags/repo-maps.", f"Using {main_model.value}: showing diffs and disabling ctags/repo-maps.",
) )
self.gpt_prompts = prompts.GPT35() self.gpt_prompts = prompts.GPT35()
map_tokens = 0 map_tokens = 0
@@ -108,7 +109,7 @@ class Coder:
self.gpt_prompts.repo_content_prefix, self.gpt_prompts.repo_content_prefix,
) )
if main_model != Models.GPT35.value: if main_model != Models.GPT35:
if self.repo_map.has_ctags: if self.repo_map.has_ctags:
self.io.tool_output("Using ctags to build repo-map.") self.io.tool_output("Using ctags to build repo-map.")
@@ -301,7 +302,7 @@ class Coder:
] ]
main_sys = self.gpt_prompts.main_system main_sys = self.gpt_prompts.main_system
if self.main_model == Models.GPT4.value: if self.main_model == Models.GPT4:
main_sys += "\n" + self.gpt_prompts.system_reminder main_sys += "\n" + self.gpt_prompts.system_reminder
messages = [ messages = [
@@ -328,7 +329,7 @@ class Coder:
if edit_error: if edit_error:
return edit_error return edit_error
if self.main_model == "gpt=4" or (self.main_model == Models.GPT35.value and not edited): if self.main_model == Models.GPT4 or (self.main_model == Models.GPT35 and not edited):
# Don't add assistant messages to the history if they contain "edits" # Don't add assistant messages to the history if they contain "edits"
# Because those edits are actually fully copies of the file! # Because those edits are actually fully copies of the file!
# That wastes too much context window. # That wastes too much context window.
@@ -422,7 +423,7 @@ class Coder:
def send(self, messages, model=None, silent=False): def send(self, messages, model=None, silent=False):
if not model: if not model:
model = self.main_model model = self.main_model.value
self.resp = "" self.resp = ""
interrupted = False interrupted = False
@@ -720,12 +721,12 @@ class Coder:
return set(self.get_all_relative_files()) - set(self.get_inchat_relative_files()) return set(self.get_all_relative_files()) - set(self.get_inchat_relative_files())
def apply_updates(self, content): def apply_updates(self, content):
if self.main_model == Models.GPT4.value: if self.main_model == Models.GPT4:
method = self.update_files_gpt4 method = self.update_files_gpt4
elif self.main_model == Models.GPT35.value: elif self.main_model == Models.GPT35:
method = self.update_files_gpt35 method = self.update_files_gpt35
else: else:
raise ValueError(f"apply_updates() doesn't support {self.main_model}") raise ValueError(f"apply_updates() doesn't support {self.main_model.value}")
try: try:
edited = method(content) edited = method(content)

View file

@@ -16,7 +16,7 @@ class Commands:
def __init__(self, io, coder): def __init__(self, io, coder):
self.io = io self.io = io
self.coder = coder self.coder = coder
self.tokenizer = tiktoken.encoding_for_model(coder.main_model) self.tokenizer = tiktoken.encoding_for_model(coder.main_model.value)
def is_command(self, inp): def is_command(self, inp):
if inp[0] == "/": if inp[0] == "/":
@@ -133,7 +133,7 @@ class Commands:
print() print()
print(f"{total:6.3f} total") print(f"{total:6.3f} total")
limit = 8 if self.coder.main_model == Models.GPT4.value else 4 limit = 8 if self.coder.main_model == Models.GPT4 else 4
remaining = limit - total remaining = limit - total
print(f"{remaining:6.3f} remaining") print(f"{remaining:6.3f} remaining")
print(f"{limit:6.3f} max context window") print(f"{limit:6.3f} max context window")
@@ -181,7 +181,7 @@ class Commands:
"was reset and removed from git.\n" "was reset and removed from git.\n"
) )
if self.coder.main_model != Models.GPT35.value: if self.coder.main_model != Models.GPT35:
return prompts.undo_command_reply return prompts.undo_command_reply
def cmd_diff(self, args): def cmd_diff(self, args):

View file

@@ -69,7 +69,7 @@ class RepoMap:
self, self,
map_tokens=1024, map_tokens=1024,
root=None, root=None,
main_model=Models.GPT4.value, main_model=Models.GPT4,
io=None, io=None,
repo_content_prefix=None, repo_content_prefix=None,
): ):
@@ -88,7 +88,7 @@ class RepoMap:
else: else:
self.has_ctags = False self.has_ctags = False
self.tokenizer = tiktoken.encoding_for_model(main_model) self.tokenizer = tiktoken.encoding_for_model(main_model.value)
self.repo_content_prefix = repo_content_prefix self.repo_content_prefix = repo_content_prefix
def get_repo_map(self, chat_files, other_files): def get_repo_map(self, chat_files, other_files):