mirror of https://github.com/Aider-AI/aider.git (synced 2025-06-05 04:05:04 +00:00)
add caching of openrouter model details

parent a39829c5f8
commit 8fca0f27ee

1 changed file with 6 additions and 2 deletions
@@ -1,6 +1,8 @@
 import tiktoken
 
 from .model import Model
 
+cached_model_details = None
+
 class OpenRouterModel(Model):
     def __init__(self, name, openai):
@@ -19,8 +21,10 @@ class OpenRouterModel(Model):
         self.tokenizer = tiktoken.get_encoding("cl100k_base")
 
         # TODO cache the model list data to speed up using multiple models
-        available_models = openai.Model.list().data
-        found = next((details for details in available_models if details.get('id') == name), None)
+        global cached_model_details
+        if cached_model_details == None:
+            cached_model_details = openai.Model.list().data
+        found = next((details for details in cached_model_details if details.get('id') == name), None)
 
         if found:
             self.max_context_tokens = int(found.context_length)
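In short, the diff replaces a per-constructor call to openai.Model.list() with a module-level cache: only the first OpenRouterModel instance pays for the network round trip, and later instances reuse the same list. Below is a minimal sketch of that memoization pattern in isolation, not the committed code; fetch_model_list() is a hypothetical stand-in for the openai.Model.list().data call, and the sample record it returns is made up for illustration.

    # Module-level cache shared by every lookup in the same process.
    cached_model_details = None

    def fetch_model_list():
        # Hypothetical stand-in for openai.Model.list().data; the real call
        # returns one record per model, including 'id' and 'context_length'.
        return [{"id": "openai/gpt-4", "context_length": 8192}]

    def get_model_details(name):
        global cached_model_details
        if cached_model_details is None:  # only the first call fetches the list
            cached_model_details = fetch_model_list()
        return next(
            (details for details in cached_model_details if details.get("id") == name),
            None,
        )

With this pattern, get_model_details("openai/gpt-4") hits the network (or here, the stub) once and answers every later call from the cached list. Two side notes on the committed version: it compares with == None, where `is None` is the more idiomatic Python spelling, and because the cache lives for the lifetime of the process, newly added OpenRouter models are only picked up after a restart.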