Mirror of https://github.com/Aider-AI/aider.git (synced 2025-06-09 22:25:00 +00:00)

commit 1334392418 ("lint")
parent 30a3cc0847

8 changed files with 49 additions and 41 deletions
@@ -1,7 +1,7 @@
+from .model import Model
 from .openai import OpenAIModel
 from .openrouter import OpenRouterModel
-from .model import Model
 
-GPT4 = Model.create('gpt-4')
-GPT35 = Model.create('gpt-3.5-turbo')
-GPT35_16k = Model.create('gpt-3.5-turbo-16k')
+GPT4 = Model.create("gpt-4")
+GPT35 = Model.create("gpt-3.5-turbo")
+GPT35_16k = Model.create("gpt-3.5-turbo-16k")
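For context, a minimal sketch of how these module-level constants are consumed; the aider.models import path is an assumption based on the relative imports in the hunk, and the attribute values are filled in by the concrete subclasses shown in the later hunks.

# Sketch only: assumes the package above is importable as aider.models and that
# constructing the default OpenAI-backed models needs no network access.
from aider.models import GPT4, GPT35_16k

print(GPT4.name)           # expected: "gpt-4"
print(GPT35_16k.name)      # expected: "gpt-3.5-turbo-16k"
print(GPT4.edit_format)    # set by the concrete Model subclass, e.g. "diff" for gpt-4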
@@ -1,4 +1,6 @@
-import openai
+import openai
+
+
 class Model:
     name = None
     edit_format = None
@@ -16,7 +18,8 @@ class Model:
     def create(cls, name):
         from .openai import OpenAIModel
         from .openrouter import OpenRouterModel
-        if ("openrouter.ai" in openai.api_base):
+
+        if "openrouter.ai" in openai.api_base:
             return OpenRouterModel(name)
         return OpenAIModel(name)
 
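The create factory above routes purely on the shared openai.api_base setting. A minimal sketch of that dispatch, with the base URL replaced by a plain variable and an illustrative OpenRouter URL:

# Sketch of the dispatch in Model.create(), using a plain variable in place of
# the shared openai.api_base setting; the URL is an illustrative value.
api_base = "https://openrouter.ai/api/v1"

backend = "OpenRouterModel" if "openrouter.ai" in api_base else "OpenAIModel"
print(backend)  # -> OpenRouterModel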
@@ -25,12 +28,12 @@ class Model:
 
     @staticmethod
     def strong_model():
-        return Model.create('gpt-4')
+        return Model.create("gpt-4")
 
     @staticmethod
     def weak_model():
-        return Model.create('gpt-3.5-turbo')
+        return Model.create("gpt-3.5-turbo")
 
     @staticmethod
     def commit_message_models():
-        return [Model.create('gpt-3.5-turbo'), Model.create('gpt-3.5-turbo-16k')]
+        return [Model.create("gpt-3.5-turbo"), Model.create("gpt-3.5-turbo-16k")]
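A hypothetical caller for the helpers above, showing the intended split between the strong model and the cheaper commit-message models; only the method names and the model choices come from the hunk, the fallback logic is illustrative.

from aider.models import Model  # assumed import path

# Hypothetical: try the cheap commit-message models first, fall back to the
# strong model if none fits (e.g. if the diff is too large for their context).
def pick_commit_model(fits_context):
    for model in Model.commit_message_models():   # gpt-3.5-turbo, then the 16k variant
        if fits_context(model):
            return model
    return Model.strong_model()                   # gpt-4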
@@ -1,5 +1,7 @@
-import tiktoken
 import re
+
+import tiktoken
+
 from .model import Model
 
 known_tokens = {
@@ -1,5 +1,6 @@
 import openai
 import tiktoken
+
 from .model import Model
 
 cached_model_details = None
@@ -7,12 +8,12 @@ cached_model_details = None
 
 class OpenRouterModel(Model):
     def __init__(self, name):
-        if name == 'gpt-4':
-            name = 'openai/gpt-4'
-        elif name == 'gpt-3.5-turbo':
-            name = 'openai/gpt-3.5-turbo'
-        elif name == 'gpt-3.5-turbo-16k':
-            name = 'openai/gpt-3.5-turbo-16k'
+        if name == "gpt-4":
+            name = "openai/gpt-4"
+        elif name == "gpt-3.5-turbo":
+            name = "openai/gpt-3.5-turbo"
+        elif name == "gpt-3.5-turbo-16k":
+            name = "openai/gpt-3.5-turbo-16k"
 
         self.name = name
         self.edit_format = edit_format_for_model(name)
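The constructor above only aliases the three short OpenAI names into OpenRouter's openai/ namespace; anything else passes through and is validated against the model listing in the next hunk. The same mapping written as a lookup table, as a sketch rather than the code in this commit:

# Equivalent to the if/elif chain above: alias bare OpenAI names to OpenRouter ids,
# leaving unrecognized names untouched (they fall through to the validation step).
OPENROUTER_ALIASES = {
    "gpt-4": "openai/gpt-4",
    "gpt-3.5-turbo": "openai/gpt-3.5-turbo",
    "gpt-3.5-turbo-16k": "openai/gpt-3.5-turbo-16k",
}

def to_openrouter_name(name):
    return OPENROUTER_ALIASES.get(name, name)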
@@ -24,20 +25,22 @@ class OpenRouterModel(Model):
         global cached_model_details
         if cached_model_details == None:
             cached_model_details = openai.Model.list().data
-        found = next((details for details in cached_model_details if details.get('id') == name), None)
+        found = next(
+            (details for details in cached_model_details if details.get("id") == name), None
+        )
 
         if found:
-            self.max_context_tokens = int(found.get('context_length'))
-            self.prompt_price = round(float(found.get('pricing').get('prompt')) * 1000,6)
-            self.completion_price = round(float(found.get('pricing').get('completion')) * 1000,6)
+            self.max_context_tokens = int(found.get("context_length"))
+            self.prompt_price = round(float(found.get("pricing").get("prompt")) * 1000, 6)
+            self.completion_price = round(float(found.get("pricing").get("completion")) * 1000, 6)
 
         else:
-            raise ValueError(f'invalid openrouter model: {name}')
+            raise ValueError(f"invalid openrouter model: {name}")
 
 
 # TODO run benchmarks and figure out which models support which edit-formats
 def edit_format_for_model(name):
-    if any(str in name for str in ['gpt-4', 'claude-2']):
+    if any(str in name for str in ["gpt-4", "claude-2"]):
         return "diff"
 
     return "whole"
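Two small points from the last hunk worked through: the edit-format choice, rewritten with a loop variable that does not shadow the builtin str, and the pricing arithmetic, which turns OpenRouter's per-token price into a rounded per-1K-token price (the 0.00003 figure is an assumed example, not OpenRouter's actual rate).

# Same behaviour as edit_format_for_model() above, with `s` instead of `str`.
def edit_format_for_model(name):
    if any(s in name for s in ["gpt-4", "claude-2"]):
        return "diff"
    return "whole"

assert edit_format_for_model("openai/gpt-4") == "diff"
assert edit_format_for_model("openai/gpt-3.5-turbo") == "whole"

# Pricing as computed in the constructor: per-token price * 1000, rounded to 6 places.
example_prompt_price_per_token = 0.00003                  # assumed example value
print(round(example_prompt_price_per_token * 1000, 6))    # -> 0.03, i.e. price per 1K tokens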