refactored litellm to avoid duplicating workarounds

This commit is contained in:
Paul Gauthier 2024-05-08 08:05:15 -07:00
parent b249177119
commit 9ff6770a04
6 changed files with 7 additions and 20 deletions

View file

@@ -11,7 +11,6 @@ from json.decoder import JSONDecodeError
 from pathlib import Path

 import git
-import litellm
 import openai
 from jsonschema import Draft7Validator
 from rich.console import Console, Text
@@ -21,6 +20,7 @@ from aider import __version__, models, prompts, utils
 from aider.commands import Commands
 from aider.history import ChatSummary
 from aider.io import InputOutput
+from aider.litellm import litellm
 from aider.mdstream import MarkdownStream
 from aider.repo import GitRepo
 from aider.repomap import RepoMap
@@ -29,8 +29,6 @@ from aider.utils import is_image_file

 from ..dump import dump  # noqa: F401

-litellm.suppress_debug_info = True
-
 class MissingAPIKeyError(ValueError):
     pass


View file

@@ -5,18 +5,16 @@ import sys
 from pathlib import Path

 import git
-import litellm
 import openai
 from prompt_toolkit.completion import Completion

 from aider import models, prompts, voice
+from aider.litellm import litellm
 from aider.scrape import Scraper
 from aider.utils import is_image_file

 from .dump import dump  # noqa: F401

-litellm.suppress_debug_info = True
-

 class SwitchModel(Exception):
     def __init__(self, model):

View file

@@ -4,7 +4,6 @@ import sys
 from pathlib import Path

 import git
-import litellm
 from dotenv import load_dotenv
 from streamlit.web import cli
@@ -13,15 +12,12 @@ from aider.args import get_parser
 from aider.coders import Coder
 from aider.commands import SwitchModel
 from aider.io import InputOutput
+from aider.litellm import litellm  # noqa: F401; properly init litellm on launch
 from aider.repo import GitRepo
 from aider.versioncheck import check_version

 from .dump import dump  # noqa: F401

-litellm.suppress_debug_info = True
-
-os.environ["OR_SITE_URL"] = "http://aider.chat"
-os.environ["OR_APP_NAME"] = "Aider"

 def get_git_root():
     """Try and guess the git repo, since the conf.yml can be at the repo root"""

View file

@@ -6,12 +6,10 @@ import sys
 from dataclasses import dataclass, fields
 from typing import Optional

-import litellm
 from PIL import Image

 from aider.dump import dump  # noqa: F401
+from aider.litellm import litellm

-litellm.suppress_debug_info = True
-
 DEFAULT_MODEL_NAME = "gpt-4-1106-preview"

View file

@@ -3,20 +3,18 @@ import json

 import backoff
 import httpx
-import litellm
 import openai

 # from diskcache import Cache
 from openai import APIConnectionError, InternalServerError, RateLimitError

 from aider.dump import dump  # noqa: F401
+from aider.litellm import litellm

 CACHE_PATH = "~/.aider.send.cache.v1"
 CACHE = None
 # CACHE = Cache(CACHE_PATH)

-litellm.suppress_debug_info = True
-

 def should_giveup(e):
     if not hasattr(e, "status_code"):

View file

@@ -3,9 +3,10 @@ import queue
 import tempfile
 import time

-import litellm
 import numpy as np

+from aider.litellm import litellm
+
 try:
     import soundfile as sf
 except (OSError, ModuleNotFoundError):
@@ -15,8 +16,6 @@ from prompt_toolkit.shortcuts import prompt

 from .dump import dump  # noqa: F401

-litellm.suppress_debug_info = True
-
 class SoundDeviceError(Exception):
     pass
