mirror of
https://github.com/Aider-AI/aider.git
synced 2025-05-25 06:44:59 +00:00
Advise the user when chat history is being summarized
This commit is contained in:
parent
844194c268
commit
0db9d210d7
2 changed files with 18 additions and 9 deletions
|
@ -360,7 +360,9 @@ class Coder:
|
||||||
|
|
||||||
def move_back_cur_messages(self, message):
|
def move_back_cur_messages(self, message):
|
||||||
self.done_messages += self.cur_messages
|
self.done_messages += self.cur_messages
|
||||||
self.done_messages = self.summarizer.summarize(self.done_messages)
|
if self.summarizer.too_big(self.done_messages):
|
||||||
|
self.io.tool_output("Summarizing chat history...")
|
||||||
|
self.done_messages = self.summarizer.summarize(self.done_messages)
|
||||||
|
|
||||||
if message:
|
if message:
|
||||||
self.done_messages += [
|
self.done_messages += [
|
||||||
|
|
|
@ -13,20 +13,27 @@ class ChatSummary:
|
||||||
self.tokenizer = tiktoken.encoding_for_model(model)
|
self.tokenizer = tiktoken.encoding_for_model(model)
|
||||||
self.max_tokens = max_tokens
|
self.max_tokens = max_tokens
|
||||||
|
|
||||||
|
def too_big(self, messages):
    """Return True when *messages* exceed this summary's token budget.

    Token counts come from self.tokenize; the debug dump mirrors the
    project's tracing convention.
    """
    counted = self.tokenize(messages)
    total = sum(count for count, _message in counted)
    dump(total, self.max_tokens)
    return total > self.max_tokens
|
||||||
|
|
||||||
|
def tokenize(self, messages):
    """Pair each message with its token count.

    Each message is JSON-serialized and run through the tokenizer;
    returns a list of (token_count, message) tuples in input order.
    """
    encode = self.tokenizer.encode
    return [(len(encode(json.dumps(message))), message) for message in messages]
|
||||||
|
|
||||||
def summarize(self, messages):
|
def summarize(self, messages):
|
||||||
num = len(messages)
|
num = len(messages)
|
||||||
dump(num)
|
dump(num)
|
||||||
if num < 2:
|
if num < 2:
|
||||||
return messages
|
return messages
|
||||||
|
|
||||||
total = 0
|
sized = self.tokenize(messages)
|
||||||
sized = []
|
total = sum(tokens for tokens, _msg in sized)
|
||||||
for msg in messages:
|
|
||||||
tokens = len(self.tokenizer.encode(json.dumps(msg)))
|
|
||||||
sized.append((tokens, msg))
|
|
||||||
total += tokens
|
|
||||||
|
|
||||||
dump(total, self.max_tokens)
|
|
||||||
if total <= self.max_tokens:
|
if total <= self.max_tokens:
|
||||||
return messages
|
return messages
|
||||||
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue