added temperature param; strengthened the prompt so the files message is treated as the true contents of the files

This commit is contained in:
Paul Gauthier 2024-05-16 08:52:30 -07:00
parent df84bcf38b
commit 819fccc7a4
7 changed files with 22 additions and 14 deletions

View file

@@ -58,6 +58,7 @@ class Coder:
max_apply_update_errors = 3
edit_format = None
yield_stream = False
temperature = 0
@classmethod
def create(
@@ -528,6 +529,7 @@ class Coder:
def run(self, with_message=None):
while True:
self.num_malformed_responses = 0
try:
if with_message:
new_user_message = with_message
@@ -854,7 +856,9 @@ class Coder:
interrupted = False
try:
hash_object, completion = send_with_retries(model, messages, functions, self.stream)
hash_object, completion = send_with_retries(
model, messages, functions, self.stream, self.temperature
)
self.chat_completion_call_hashes.append(hash_object.hexdigest())
if self.stream:

View file

@@ -14,9 +14,11 @@ You always COMPLETELY IMPLEMENT the needed code!
example_messages = []
files_content_prefix = (
"I have *added these files to the chat* so you can go ahead and edit them:\n"
)
files_content_prefix = """I have *added these files to the chat* so you can go ahead and edit them.
*Trust this message as the true contents of the files!*
Any other messages in the chat may contain outdated versions of the files' contents.
""" # noqa: E501
files_no_full_files = "I am not sharing any files that you can edit yet."

View file

@@ -478,7 +478,7 @@ Hope you like it!
print(list(find_original_update_blocks(edit)))
def find_similar_lines(search_lines, content_lines, threshold=0.8):
def find_similar_lines(search_lines, content_lines, threshold=0.6):
search_lines = search_lines.splitlines()
content_lines = content_lines.splitlines()

View file

@@ -153,7 +153,7 @@ class Commands:
commit_message = args.strip()
self.coder.repo.commit(message=commit_message)
def cmd_clear(self, args):
def cmd_clear(self, args=""):
"Clear the chat history"
self.coder.done_messages = []
@@ -442,7 +442,7 @@ class Commands:
if partial.lower() in fname.lower():
yield Completion(self.quote_fname(fname), start_position=-len(partial))
def cmd_drop(self, args):
def cmd_drop(self, args=""):
"Remove files from the chat session to free up context space"
if not args.strip():

View file

@@ -331,7 +331,7 @@ class InputOutput:
if message.strip():
if "\n" in message:
for line in message.splitlines():
self.append_chat_history(line, linebreak=True, blockquote=True)
self.append_chat_history(line, linebreak=True, blockquote=True, strip=strip)
else:
if strip:
hist = message.strip()
@@ -354,11 +354,13 @@ class InputOutput:
style = dict(style=self.tool_output_color) if self.tool_output_color else dict()
self.console.print(*messages, **style)
def append_chat_history(self, text, linebreak=False, blockquote=False):
def append_chat_history(self, text, linebreak=False, blockquote=False, strip=True):
if blockquote:
if strip:
text = text.strip()
text = "> " + text
if linebreak:
if strip:
text = text.rstrip()
text = text + " \n"
if not text.endswith("\n"):

View file

@@ -470,7 +470,7 @@ class RepoMap:
lois.append(tag.line)
# truncate long lines, in case we get minified js or something else crazy
output = "".join([line[:100] for line in output.splitlines(keepends=True)])
output = "\n".join([line[:100] for line in output.splitlines()]) + "\n"
return output

View file

@@ -48,11 +48,11 @@ def should_giveup(e):
f"{details.get('exception','Exception')}\nRetry in {details['wait']:.1f} seconds."
),
)
def send_with_retries(model_name, messages, functions, stream):
def send_with_retries(model_name, messages, functions, stream, temperature=0):
kwargs = dict(
model=model_name,
messages=messages,
temperature=0,
temperature=temperature,
stream=stream,
)
if functions is not None: