Paul Gauthier 2023-07-25 09:13:07 -03:00
parent 9050b9bd93
commit 05c1b3bc99
2 changed files with 13 additions and 14 deletions


@@ -361,16 +361,6 @@ class Coder:
         self.last_keyboard_interrupt = now
 
-    def summarize_end(self):
-        if self.summarizer_thread is None:
-            return
-
-        self.summarizer_thread.join()
-        self.summarizer_thread = None
-
-        self.done_messages = self.summarized_done_messages
-        self.summarized_done_messages = None
-
     def summarize_start(self):
         if not self.summarizer.too_big(self.done_messages):
             return
@@ -386,6 +376,16 @@ class Coder:
         self.summarized_done_messages = self.summarizer.summarize(self.done_messages)
         print("done!")
 
+    def summarize_end(self):
+        if self.summarizer_thread is None:
+            return
+
+        self.summarizer_thread.join()
+        self.summarizer_thread = None
+
+        self.done_messages = self.summarized_done_messages
+        self.summarized_done_messages = None
+
     def move_back_cur_messages(self, message):
         self.done_messages += self.cur_messages
         self.summarize_start()
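Within Coder this is a pure move: summarize_end now sits after summarize_start rather than before it, with its body unchanged. A minimal sketch of the handshake the two methods implement is below. The threading.Thread setup and the worker name summarize_worker are assumptions, since the diff only shows the worker's body (the summarize(...) call and print("done!")) and the join/swap logic.

import threading

class CoderSketch:
    # Sketch only: mirrors the summarize_start/summarize_end pair visible in
    # this diff; the Thread creation and the worker method name are assumed.
    def __init__(self, summarizer, done_messages):
        self.summarizer = summarizer
        self.done_messages = done_messages
        self.summarizer_thread = None
        self.summarized_done_messages = None

    def summarize_start(self):
        if not self.summarizer.too_big(self.done_messages):
            return
        # Assumed: kick off a background thread that builds the summary.
        self.summarizer_thread = threading.Thread(target=self.summarize_worker)
        self.summarizer_thread.start()

    def summarize_worker(self):
        # These two lines appear as context in the second hunk above.
        self.summarized_done_messages = self.summarizer.summarize(self.done_messages)
        print("done!")

    def summarize_end(self):
        # Same logic as the moved method: wait for the worker, then swap
        # the summarized history in for the full one.
        if self.summarizer_thread is None:
            return
        self.summarizer_thread.join()
        self.summarizer_thread = None
        self.done_messages = self.summarized_done_messages
        self.summarized_done_messages = None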


@@ -9,7 +9,7 @@ from aider.sendchat import simple_send_with_retries
 
 class ChatSummary:
-    def __init__(self, model=models.GPT35.name, max_tokens=128):
+    def __init__(self, model=models.GPT35.name, max_tokens=1024):
        self.tokenizer = tiktoken.encoding_for_model(model)
        self.max_tokens = max_tokens
@@ -27,8 +27,8 @@ class ChatSummary:
         return sized
 
     def summarize(self, messages):
-        if len(messages) < 2:
-            return messages
+        if len(messages) <= 4:
+            return self.summarize_all(messages)
 
         sized = self.tokenize(messages)
         total = sum(tokens for tokens, _msg in sized)
@@ -84,7 +84,6 @@ class ChatSummary:
         summary = simple_send_with_retries(model=models.GPT35.name, messages=messages)
         summary = prompts.summary_prefix + summary
-        dump(summary)
 
         return [dict(role="user", content=summary)]
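
In ChatSummary the default max_tokens budget rises from 128 to 1024, summarize() now hands any history of four or fewer messages straight to summarize_all() instead of returning it untouched, and a leftover debug dump(summary) call is dropped. A hedged usage sketch follows: the import path aider.history and the example message contents are assumptions, since the diff does not name the file, and summarize() ultimately calls GPT-3.5 via simple_send_with_retries, so running it needs API access.

# Usage sketch under the assumptions noted above.
from aider.history import ChatSummary  # assumed module path

summarizer = ChatSummary(max_tokens=1024)  # new default shown in this diff

chat_history = [
    dict(role="user", content="Please add type hints to utils.py."),
    dict(role="assistant", content="Done; every public function is annotated."),
]

# With four or fewer messages this now goes through summarize_all(), which
# sends the history to the model and, as in the final hunk above, returns a
# list containing a single user-role message carrying the summary.
summary_messages = summarizer.summarize(chat_history)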