offer to install aider[hf]
This commit is contained in:
parent 4e07710d4e
commit f688c18b65
8 changed files with 60 additions and 14 deletions
aider/commands.py

@@ -7,7 +7,7 @@ from pathlib import Path
 import git
 
 from aider import models, prompts, voice
-from aider.help import Help
+from aider.help import Help, PipInstallHF
 from aider.llm import litellm
 from aider.scrape import Scraper
 from aider.utils import is_image_file
@@ -654,7 +654,14 @@ class Commands:
         from aider.coders import Coder
 
         if not self.help:
-            self.help = Help()
+            try:
+                self.help = Help()
+            except PipInstallHF as err:
+                self.io.tool_error(str(err))
+                if self.io.confirm_ask("Run pip install?", default="y"):
+                    self.help = Help(pip_install=True)
+                else:
+                    return
 
         coder = Coder.create(
             main_model=self.coder.main_model,
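The hunk above implements a catch-and-offer pattern: /help tries to build the Help object, and if the optional HuggingFace extra is missing it shows the install command and, on confirmation, retries with pip_install=True. Below is a minimal standalone sketch of that same pattern; MissingExtra, load_help, and cmd_help are hypothetical stand-ins for PipInstallHF, Help, and the /help command, not aider APIs.

# Sketch of the catch-and-offer-install flow used by /help above.
# MissingExtra, load_help, and cmd_help are hypothetical stand-ins.
class MissingExtra(Exception):
    """Raised when an optional dependency is not installed."""


def load_help(pip_install=False):
    if pip_install:
        print("would run: pip install aider[hf] ...")
    try:
        import llama_index  # noqa: F401  # the optional dependency
    except ImportError:
        raise MissingExtra("interactive /help needs the hf extra")
    return object()  # stands in for a ready Help instance


def cmd_help():
    try:
        return load_help()
    except MissingExtra as err:
        print(err)
        if input("Run pip install? [y] ").strip().lower() in ("", "y", "yes"):
            return load_help(pip_install=True)
        return None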
aider/help.py

@@ -6,7 +6,7 @@ from pathlib import Path
 
 import importlib_resources
 
-from aider import __version__
+from aider import __version__, utils
 from aider.dump import dump  # noqa: F401
 from aider.help_pats import exclude_website_pats
 
@@ -87,10 +87,35 @@ def get_index():
     return index
 
 
+class PipInstallHF(Exception):
+    pass
+
+
+pip_install_cmd = [
+    "aider[hf]",
+    "--extra-index-url",
+    "https://download.pytorch.org/whl/cpu",
+]
+
+pip_install_error = f"""
+To use interactive /help you need to install HuggingFace embeddings:
+
+pip install {' '.join(pip_install_cmd)}
+
+"""
+
+
 class Help:
-    def __init__(self):
+    def __init__(self, pip_install=False):
+        if pip_install:
+            utils.pip_install(pip_install_cmd)
+
         from llama_index.core import Settings
-        from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+
+        try:
+            from llama_index.embeddings.huggingface import HuggingFaceEmbedding
+        except ImportError:
+            raise PipInstallHF(pip_install_error)
 
         os.environ["TOKENIZERS_PARALLELISM"] = "true"
         Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
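Taken together, help.py now defers the heavy llama_index import to Help.__init__ and converts a missing dependency into PipInstallHF, whose message embeds the exact pip install aider[hf] --extra-index-url command. A hedged sketch of driving that behavior directly, outside the /help command; it only mirrors what commands.py above already does:

# Sketch: calling the new Help API directly; mirrors the /help flow above.
from aider.help import Help, PipInstallHF

try:
    helper = Help()
except PipInstallHF as err:
    print(err)  # the message is pip_install_error, with the full pip command
    # Let Help run the pip install itself, then build the index as usual.
    helper = Help(pip_install=True)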
aider/utils.py

@@ -1,4 +1,6 @@
 import os
+import subprocess
+import sys
 import tempfile
 from pathlib import Path
 
@@ -176,3 +178,18 @@ def split_chat_history_markdown(text, include_tool=False):
         messages = [m for m in messages if m["role"] != "tool"]
 
     return messages
+
+
+def pip_install(args):
+    cmd = [
+        sys.executable,
+        "-m",
+        "pip",
+        "install",
+    ]
+    cmd += args
+
+    try:
+        subprocess.run(cmd)
+    except subprocess.CalledProcessError as e:
+        print(f"Error running pip download: {e}")
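One caveat about the pip_install helper above: subprocess.run only raises CalledProcessError when check=True is passed, so the except branch as committed cannot fire, and its message says "pip download" where "pip install" is meant. A hedged sketch of a stricter variant follows; pip_install_checked is a hypothetical name, not what this commit ships.

import subprocess
import sys


def pip_install_checked(args):
    # Same command construction as utils.pip_install in the hunk above.
    cmd = [sys.executable, "-m", "pip", "install", *args]
    try:
        # check=True turns a non-zero pip exit status into
        # CalledProcessError, so the handler below can actually run.
        subprocess.run(cmd, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Error running pip install: {e}")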