feat: use copy-paste instead of api

Roberto Gudelj 2025-03-09 23:14:53 +00:00
parent 56b45ce1d3
commit 8bc7e32fa7
4 changed files with 82 additions and 4 deletions

View file

@@ -647,6 +647,12 @@ def get_parser(default_config_files, git_root):
default=False,
help="Enable automatic copy/paste of chat between aider and web UI (default: False)",
)
group.add_argument(
"--copy-paste-no-api",
action=argparse.BooleanOptionalAction,
default=False,
help="Use automatic copy/paste of chat between aider and web UI instead of API (default: False)",
)
group.add_argument(
"--apply",
metavar="FILE",
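
Note: argparse.BooleanOptionalAction (Python 3.9+) also registers the negated --no-copy-paste-no-api form automatically. A minimal standalone sketch of how the new flag parses, outside aider's real parser:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument(
    "--copy-paste-no-api",
    action=argparse.BooleanOptionalAction,
    default=False,
)

# BooleanOptionalAction registers both the positive and the negated flag.
print(parser.parse_args(["--copy-paste-no-api"]).copy_paste_no_api)     # True
print(parser.parse_args(["--no-copy-paste-no-api"]).copy_paste_no_api)  # False
print(parser.parse_args([]).copy_paste_no_api)                          # False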

View file

@@ -211,6 +211,9 @@ class Coder:
main_model = self.main_model
weak_model = main_model.weak_model
if main_model.copy_paste_no_api:
lines.append("Running in copy-paste mode instead of using API")
if weak_model is not main_model:
prefix = "Main model"
else:

View file

@@ -820,6 +820,8 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
editor_model=args.editor_model,
editor_edit_format=args.editor_edit_format,
verbose=args.verbose,
copy_paste_no_api=args.copy_paste_no_api,
io=io,
)
# Check if deprecated remove_reasoning is set
@@ -948,6 +950,9 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
if args.cache_prompts and args.map_refresh == "auto":
args.map_refresh = "files"
if args.copy_paste_no_api:
args.stream = False
if not main_model.streaming:
if args.stream:
io.tool_warning(

View file

@@ -7,11 +7,13 @@ import os
import platform
import sys
import time
import uuid
from dataclasses import dataclass, fields
from pathlib import Path
from typing import Optional, Union
import json5
import pyperclip
import yaml
from PIL import Image
@@ -304,9 +306,7 @@ model_info_manager = ModelInfoManager()
class Model(ModelSettings):
def __init__(
self, model, weak_model=None, editor_model=None, editor_edit_format=None, verbose=False
):
def __init__(self, model, weak_model=None, editor_model=None, editor_edit_format=None, verbose=False, copy_paste_no_api=False, io=None):
# Map any alias to its canonical name
model = MODEL_ALIASES.get(model, model)
@@ -317,6 +317,9 @@ class Model(ModelSettings):
self.weak_model = None
self.editor_model = None
self.io = io
self.copy_paste_no_api = copy_paste_no_api
# Find the extra settings
self.extra_model_settings = next(
(ms for ms in MODEL_SETTINGS if ms.name == "aider/extra_params"), None
@@ -334,7 +337,7 @@ class Model(ModelSettings):
# with minimum 1k and maximum 8k
self.max_chat_history_tokens = min(max(max_input_tokens / 16, 1024), 8192)
self.configure_model_settings(model)
self.configure_model_settings(model)
if weak_model is False:
self.weak_model_name = None
else:
@@ -345,6 +348,10 @@ class Model(ModelSettings):
else:
self.get_editor_model(editor_model, editor_edit_format)
if self.copy_paste_no_api:
self.weak_model = self
self.editor_model = self
def get_model_info(self, model):
return model_info_manager.get_model_info(model)
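
Note: in copy-paste mode the Model needs no API keys and also stands in for its own weak and editor models. A minimal sketch of constructing one directly (the model name and io value are placeholders, not part of this commit):

from aider.models import Model

main_model = Model(
    "gpt-4o",                # placeholder name; no API key is required in this mode
    copy_paste_no_api=True,
    io=None,                 # an InputOutput instance enables the clipboard status messages
)

# The new branch in __init__ points weak_model and editor_model back at self.
assert main_model.weak_model is main_model
assert main_model.editor_model is main_model
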
@@ -874,6 +881,9 @@ class Model(ModelSettings):
return self.name.startswith("ollama/") or self.name.startswith("ollama_chat/")
def send_completion(self, messages, functions, stream, temperature=None):
if self.copy_paste_no_api:
return self.copy_paste_completion(messages)
if os.environ.get("AIDER_SANITY_CHECK_TURNS"):
sanity_check_messages(messages)
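
Note: callers are unaffected by the early return, since copy_paste_completion hands back the same (hash_object, completion) pair the litellm path does. Continuing the sketch above:

hash_object, completion = main_model.send_completion(
    messages=[{"role": "user", "content": "hello"}],
    functions=None,
    stream=False,
)
# completion is a litellm.ModelResponse, so the usual accessors apply;
# the call blocks until a reply is copied back to the clipboard.
print(completion.choices[0].message.content)
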
@@ -917,6 +927,57 @@ class Model(ModelSettings):
res = litellm.completion(**kwargs)
return hash_object, res
def copy_paste_completion(self, messages):
formatted_messages = "\n".join(
f"{msg['content']}" for msg in messages if msg.get("content")
)
pyperclip.copy(formatted_messages)
if self.io is not None:
self.io.tool_output(
"""✓ Request copied to clipboard
Paste into LLM web UI
Copy response back to clipboard
Monitoring clipboard for changes..."""
)
last_clipboard = pyperclip.paste()
while last_clipboard == pyperclip.paste():
time.sleep(0.5)
response = pyperclip.paste()
completion = litellm.ModelResponse(
id=f"chatcmpl-{uuid.uuid4()}",
choices=[
{
"message": {
"role": "assistant",
"content": response,
"function_call": None,
},
"finish_reason": "stop",
"index": 0,
}
],
created=int(time.time()),
model=self.name,
usage={"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0},
object="chat.completion",
)
kwargs = dict(
model=self.name,
messages=messages,
stream=False
)
key = json.dumps(kwargs, sort_keys=True).encode()
hash_object = hashlib.sha1(key)
return hash_object, completion
def simple_send_with_retries(self, messages):
from aider.exceptions import LiteLLMExceptions
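
Note: the heart of copy_paste_completion is a simple clipboard round-trip: copy the rendered prompt out, then poll until the clipboard contents change and treat the new contents as the assistant reply. A standalone sketch of that pattern (the function name and poll interval are illustrative):

import time

import pyperclip


def clipboard_round_trip(prompt, poll_seconds=0.5):
    """Copy `prompt` to the clipboard, then block until the clipboard
    changes and return the new contents (the reply pasted by the user)."""
    pyperclip.copy(prompt)
    last_clipboard = pyperclip.paste()
    while pyperclip.paste() == last_clipboard:
        time.sleep(poll_seconds)
    return pyperclip.paste()
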
@@ -1047,6 +1108,9 @@ def sanity_check_models(io, main_model):
def sanity_check_model(io, model):
show = False
if model.copy_paste_no_api:
return show
if model.missing_keys:
show = True
io.tool_warning(f"Warning: {model} expects these environment variables")