Mirror of https://github.com/Aider-AI/aider.git (synced 2025-05-22 21:34:59 +00:00)
chore: remove CACHE logic from sendchat and models files
parent db694b20df
commit c3beaedaa6
2 changed files with 1 addition and 12 deletions
aider/models.py

@@ -551,13 +551,8 @@ class Model(ModelSettings):
         key = json.dumps(kwargs, sort_keys=True).encode()
         # dump(kwargs)
         hash_object = hashlib.sha1(key)
-        from aider.sendchat import CACHE, litellm
-
-        if not stream and CACHE is not None and key in CACHE:
-            return hash_object, CACHE[key]
+        from aider.sendchat import litellm
         res = litellm.completion(**kwargs)
-        if not stream and CACHE is not None:
-            CACHE[key] = res
         return hash_object, res
 
     def simple_send_with_retries(self, messages):
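After this change, the request path in models.py no longer consults a cache: the kwargs are hashed (the hash is still returned to callers) and every call goes straight to litellm. Below is a minimal standalone sketch of that post-change flow; the send_completion wrapper name and the completion_fn parameter are illustrative stand-ins for aider's method and litellm.completion, not the project's actual API.

import hashlib
import json


def send_completion(completion_fn, **kwargs):
    # Sketch of the post-change flow: fingerprint the request kwargs,
    # then call the provider directly with no cache lookup or cache write.
    key = json.dumps(kwargs, sort_keys=True).encode()
    hash_object = hashlib.sha1(key)

    res = completion_fn(**kwargs)
    return hash_object, res


# Hypothetical usage: pass litellm.completion as the callable.
# hash_object, res = send_completion(
#     litellm.completion,
#     model="gpt-4o",
#     messages=[{"role": "user", "content": "hi"}],
# )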
aider/sendchat.py

@@ -1,12 +1,6 @@
 from aider.dump import dump  # noqa: F401
 from aider.utils import format_messages
 
-# from diskcache import Cache
-
-
-CACHE_PATH = "~/.aider.send.cache.v1"
-CACHE = None
-# CACHE = Cache(CACHE_PATH)
 
 RETRY_TIMEOUT = 60
 
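For context on what was dropped: the module shipped with CACHE = None and the diskcache setup commented out, but when enabled it keyed each request by its serialized kwargs and stored non-streaming responses on disk under CACHE_PATH. A rough sketch of that behavior follows, assuming the diskcache package is installed; the cached_completion wrapper and completion_fn parameter are illustrative, not aider's API.

import hashlib
import json
import os

from diskcache import Cache  # optional dependency the removed code referenced

CACHE_PATH = "~/.aider.send.cache.v1"
CACHE = Cache(os.path.expanduser(CACHE_PATH))


def cached_completion(completion_fn, stream=False, **kwargs):
    # Same keying scheme as the removed code: serialize the request kwargs
    # deterministically and hash them.
    key = json.dumps(kwargs, sort_keys=True).encode()
    hash_object = hashlib.sha1(key)

    # Streaming responses cannot be replayed from disk, so only
    # non-streaming calls hit the cache (mirroring the removed guard).
    if not stream and key in CACHE:
        return hash_object, CACHE[key]

    res = completion_fn(stream=stream, **kwargs)
    if not stream:
        CACHE[key] = res  # the cached value must be picklable
    return hash_object, res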