Merge branch 'main' into slim-playwright

This commit is contained in:
Paul Gauthier 2024-07-14 18:29:39 +01:00
commit a3a4113331
27 changed files with 470 additions and 310 deletions

View file

@ -91,7 +91,7 @@ class YamlHelpFormatter(argparse.HelpFormatter):
def _format_text(self, text):
return """
##########################################################
# Sample .aider.conf.yaml
# Sample .aider.conf.yml
# This file lists *all* the valid configuration entries.
# Place in your home dir, or at the root of your git repo.
##########################################################

View file

@ -7,7 +7,7 @@ from pathlib import Path
import git
from aider import models, prompts, voice
from aider.help import Help, PipInstallHF
from aider.help import Help, install_help_extra
from aider.llm import litellm
from aider.scrape import Scraper
from aider.utils import is_image_file
@ -662,19 +662,12 @@ class Commands:
from aider.coders import Coder
if not self.help:
try:
self.help = Help()
except PipInstallHF as err:
self.io.tool_error(str(err))
if self.io.confirm_ask("Run pip install?", default="y"):
try:
self.help = Help(pip_install=True)
except PipInstallHF:
pass
res = install_help_extra(self.io)
if not res:
self.io.tool_error("Unable to initialize interactive help.")
return
if not self.help:
self.io.tool_error("Unable to initialize interactive help.")
return
self.help = Help()
coder = Coder.create(
main_model=self.coder.main_model,

View file

@ -13,6 +13,21 @@ from aider.help_pats import exclude_website_pats
warnings.simplefilter("ignore", category=FutureWarning)
def install_help_extra(io):
pip_install_cmd = [
"aider-chat[hf-embed]",
"--extra-index-url",
"https://download.pytorch.org/whl/cpu",
]
res = utils.check_pip_install_extra(
io,
"llama_index.embeddings.huggingface",
"To use interactive /help you need to install HuggingFace embeddings",
pip_install_cmd,
)
return res
def get_package_files():
for path in importlib_resources.files("aider.website").iterdir():
if path.is_file():
@ -87,35 +102,10 @@ def get_index():
return index
class PipInstallHF(Exception):
pass
pip_install_cmd = [
"aider-chat[hf-embed]",
"--extra-index-url",
"https://download.pytorch.org/whl/cpu",
]
pip_install_error = """
To use interactive /help you need to install HuggingFace embeddings:
{cmd}
""" # noqa: E231
class Help:
def __init__(self, pip_install=False):
cmd = utils.get_pip_install(pip_install_cmd)
if pip_install:
utils.run_install(cmd)
try:
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
except ImportError:
raise PipInstallHF(pip_install_error.format(cmd=' '.join(cmd)))
def __init__(self):
from llama_index.core import Settings
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
os.environ["TOKENIZERS_PARALLELISM"] = "true"
Settings.embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")

View file

@ -148,6 +148,15 @@ def scrub_sensitive_info(args, text):
return text
def check_streamlit_install(io):
return utils.check_pip_install_extra(
io,
"streamlit",
"You need to install the aider browser feature",
["aider-chat[browser]"],
)
def launch_gui(args):
from streamlit.web import cli
@ -318,10 +327,6 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
litellm.client_session = httpx.Client(verify=False)
if args.gui and not return_coder:
launch_gui(argv)
return
if args.dark_mode:
args.user_input_color = "#32FF32"
args.tool_error_color = "#FF3333"
@ -355,6 +360,12 @@ def main(argv=None, input=None, output=None, force_git_root=None, return_coder=F
editingmode=editing_mode,
)
if args.gui and not return_coder:
if not check_streamlit_install(io):
return
launch_gui(argv)
return
for fname in loaded_dotenvs:
io.tool_output(f"Loaded {fname}")

View file

@ -58,6 +58,7 @@ ANTHROPIC_MODELS = [ln.strip() for ln in ANTHROPIC_MODELS.splitlines() if ln.str
@dataclass
class ModelSettings:
# Model class needs to have each of these as well
name: str
edit_format: str
weak_model_name: Optional[str] = None
@ -360,6 +361,7 @@ class Model:
lazy = False
reminder_as_sys_msg = False
examples_as_sys_msg = False
can_prefill = False
max_chat_history_tokens = 1024
weak_model = None
@ -652,11 +654,7 @@ def sanity_check_model(io, model):
if possible_matches:
io.tool_output("Did you mean one of these?")
for match in possible_matches:
fq, m = match
if fq == m:
io.tool_output(f"- {m}")
else:
io.tool_output(f"- {m} ({fq})")
io.tool_output(f"- {model}")
if show:
io.tool_output(f"For more info, see: {urls.model_warnings}\n")
@ -665,7 +663,7 @@ def sanity_check_model(io, model):
def fuzzy_match_models(name):
name = name.lower()
chat_models = []
chat_models = set()
for model, attrs in litellm.model_cost.items():
model = model.lower()
if attrs.get("mode") != "chat":
@ -677,8 +675,10 @@ def fuzzy_match_models(name):
else:
fq_model = provider + model
chat_models.append((fq_model, model))
chat_models.add(fq_model)
chat_models.add(model)
chat_models = sorted(chat_models)
# exactly matching model
# matching_models = [
# (fq,m) for fq,m in chat_models
@ -688,19 +688,14 @@ def fuzzy_match_models(name):
# return matching_models
# Check for model names containing the name
matching_models = [(fq, m) for fq, m in chat_models if name in fq]
matching_models = [m for m in chat_models if name in m]
if matching_models:
return matching_models
# Check for slight misspellings
models = [m for fq, m in chat_models]
models = list(chat_models)
matching_models = difflib.get_close_matches(name, models, n=3, cutoff=0.8)
if matching_models:
return list(zip(matching_models, matching_models))
fq_models = [fq for fq, m in chat_models]
matching_models = difflib.get_close_matches(name, fq_models, n=3, cutoff=0.8)
return list(zip(matching_models, matching_models))
return sorted(matching_models)
def print_matching_models(io, search):
@ -708,8 +703,7 @@ def print_matching_models(io, search):
if matches:
io.tool_output(f'Models which match "{search}":')
for model in matches:
fq, m = model
io.tool_output(f"- {fq}")
io.tool_output(f"- {model}")
else:
io.tool_output(f'No models match "{search}".')

View file

@ -1,11 +1,13 @@
import unittest
from aider.help import Help
from aider.help import Help, install_help_extra
from aider.io import InputOutput
class TestHelp(unittest.TestCase):
def setUp(self):
Help(pip_install=True)
io = InputOutput(yes=True)
install_help_extra(io)
def test_init(self):
help_inst = Help()

View file

@ -1,8 +1,8 @@
import itertools
import os
import subprocess
import sys
import tempfile
import itertools
from pathlib import Path
import git
@ -182,7 +182,6 @@ def split_chat_history_markdown(text, include_tool=False):
def get_pip_install(args):
cmd = [
sys.executable,
"-m",
@ -192,14 +191,22 @@ def get_pip_install(args):
cmd += args
return cmd
def run_install(cmd):
print()
print("Installing: ", ' '.join(cmd))
print("Installing: ", " ".join(cmd))
try:
process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, bufsize=1, universal_newlines=True)
output = []
spinner = itertools.cycle(['⠋', '⠙', '⠹', '⠸', '⠼', '⠴', '⠦', '⠧', '⠇', '⠏'])
process = subprocess.Popen(
cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
bufsize=1,
universal_newlines=True,
)
spinner = itertools.cycle(["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"])
for line in process.stdout:
output.append(line)
@ -208,14 +215,45 @@ def run_install(cmd):
return_code = process.wait()
if return_code == 0:
print("\rInstallation completed successfully.")
print("\rInstallation complete.")
print()
return True
return True, output
except subprocess.CalledProcessError as e:
print(f"\nError running pip install: {e}")
print("\nInstallation failed.\n")
return False, output
def check_pip_install_extra(io, module, prompt, pip_install_cmd):
try:
__import__(module)
return True
except (ImportError, ModuleNotFoundError):
pass
cmd = get_pip_install(pip_install_cmd)
text = f"{prompt}:\n\n{' '.join(cmd)}\n"
io.tool_error(text)
if not io.confirm_ask("Run pip install?", default="y"):
return
success, output = run_install(cmd)
if not success:
return
try:
__import__(module)
return True
except (ImportError, ModuleNotFoundError):
pass
for line in output:
print(line)
print()
print(f"Failed to install {pip_install_cmd[0]}")

View file

@ -29,9 +29,9 @@ def check_version(io, just_check=False):
except Exception as err:
io.tool_error(f"Error checking pypi for new version: {err}")
return False
fname.parent.mkdir(parents=True, exist_ok=True)
fname.touch()
finally:
fname.parent.mkdir(parents=True, exist_ok=True)
fname.touch()
if just_check:
return is_update_available
@ -49,7 +49,8 @@ Newer aider version v{latest_version} is available. To upgrade, run:
io.tool_error(text)
if io.confirm_ask("Run pip install?"):
if utils.run_install(cmd):
success, _output = utils.run_install(cmd)
if success:
io.tool_output("Re-run aider to use new version.")
sys.exit()

View file

@ -85,8 +85,12 @@ class Voice:
self.start_time = time.time()
with self.sd.InputStream(samplerate=sample_rate, channels=1, callback=self.callback):
prompt(self.get_prompt, refresh_interval=0.1)
try:
with self.sd.InputStream(samplerate=sample_rate, channels=1, callback=self.callback):
prompt(self.get_prompt, refresh_interval=0.1)
except self.sd.PortAudioError as err:
print(err)
return
with sf.SoundFile(filename, mode="x", samplerate=sample_rate, channels=1) as file:
while not self.q.empty():

View file

@ -681,4 +681,26 @@
versions: 0.41.1-dev
seconds_per_case: 7.1
total_cost: 0.1946
- dirname: 2024-07-09-10-12-27--gemma2:27b-instruct-q8_0
test_cases: 133
model: gemma2:27b-instruct-q8_0
edit_format: whole
commit_hash: f9d96ac-dirty
pass_rate_1: 31.6
pass_rate_2: 36.1
percent_cases_well_formed: 100.0
error_outputs: 35
num_malformed_responses: 0
num_with_malformed_responses: 0
user_asks: 35
lazy_comments: 2
syntax_errors: 0
indentation_errors: 0
exhausted_context_windows: 0
test_timeouts: 3
command: aider --model ollama/gemma2:27b-instruct-q8_0
date: 2024-07-09
versions: 0.43.0
seconds_per_case: 101.3
total_cost: 0.0000

View file

@ -1,5 +1,5 @@
##########################################################
# Sample .aider.conf.yaml
# Sample .aider.conf.yml
# This file lists *all* the valid configuration entries.
# Place in your home dir, or at the root of your git repo.
##########################################################
@ -207,10 +207,10 @@
#version:
## Check for updates and return status in the exit code
#check-update: false
#just-check-update: false
## Skips checking for the update when the program runs
#skip-check-update: false
## Check for new aider versions on launch
#check-update: true
## Apply the changes from the given file instead of running the chat (debug)
#apply:

View file

@ -208,10 +208,10 @@
#AIDER_VOICE_LANGUAGE=en
## Check for updates and return status in the exit code
#AIDER_CHECK_UPDATE=false
#AIDER_JUST_CHECK_UPDATE=false
## Skips checking for the update when the program runs
#AIDER_SKIP_CHECK_UPDATE=false
## Check for new aider versions on launch
#AIDER_CHECK_UPDATE=true
## Apply the changes from the given file instead of running the chat (debug)
#AIDER_APPLY=

View file

@ -38,7 +38,7 @@ cog.outl("```")
]]]-->
```
##########################################################
# Sample .aider.conf.yaml
# Sample .aider.conf.yml
# This file lists *all* the valid configuration entries.
# Place in your home dir, or at the root of your git repo.
##########################################################
@ -246,10 +246,10 @@ cog.outl("```")
#version:
## Check for updates and return status in the exit code
#check-update: false
#just-check-update: false
## Skips checking for the update when the program runs
#skip-check-update: false
## Check for new aider versions on launch
#check-update: true
## Apply the changes from the given file instead of running the chat (debug)
#apply:

View file

@ -250,10 +250,10 @@ cog.outl("```")
#AIDER_VOICE_LANGUAGE=en
## Check for updates and return status in the exit code
#AIDER_CHECK_UPDATE=false
#AIDER_JUST_CHECK_UPDATE=false
## Skips checking for the update when the program runs
#AIDER_SKIP_CHECK_UPDATE=false
## Check for new aider versions on launch
#AIDER_CHECK_UPDATE=true
## Apply the changes from the given file instead of running the chat (debug)
#AIDER_APPLY=

View file

@ -51,10 +51,11 @@ usage: aider [-h] [--openai-api-key] [--anthropic-api-key] [--model]
[--dry-run | --no-dry-run] [--commit] [--lint]
[--lint-cmd] [--auto-lint | --no-auto-lint]
[--test-cmd] [--auto-test | --no-auto-test] [--test]
[--vim] [--voice-language] [--version] [--check-update]
[--skip-check-update] [--apply] [--yes] [-v]
[--show-repo-map] [--show-prompts] [--exit] [--message]
[--message-file] [--encoding] [-c] [--gui]
[--vim] [--voice-language] [--version]
[--just-check-update]
[--check-update | --no-check-update] [--apply] [--yes]
[-v] [--show-repo-map] [--show-prompts] [--exit]
[--message] [--message-file] [--encoding] [-c] [--gui]
```
@ -396,15 +397,18 @@ Environment variable: `AIDER_VOICE_LANGUAGE`
### `--version`
Show the version number and exit
### `--check-update`
### `--just-check-update`
Check for updates and return status in the exit code
Default: False
Environment variable: `AIDER_CHECK_UPDATE`
Environment variable: `AIDER_JUST_CHECK_UPDATE`
### `--skip-check-update`
Skips checking for the update when the program runs
Default: False
Environment variable: `AIDER_SKIP_CHECK_UPDATE`
### `--check-update`
Check for new aider versions on launch
Default: True
Environment variable: `AIDER_CHECK_UPDATE`
Aliases:
- `--check-update`
- `--no-check-update`
### `--apply FILE`
Apply the changes from the given file instead of running the chat (debug)

View file

@ -71,6 +71,7 @@ cog.out(''.join(lines))
- DATABRICKS_API_KEY
- DEEPINFRA_API_KEY
- DEEPSEEK_API_KEY
- EMPOWER_API_KEY
- FIREWORKSAI_API_KEY
- FIREWORKS_AI_API_KEY
- FIREWORKS_API_KEY

View file

@ -1,8 +1,8 @@
FROM python:3.10-slim
RUN apt-get update
RUN apt-get install -y less git build-essential
COPY requirements-dev.txt /aider/requirements-dev.txt
COPY . /aider
RUN pip install --no-cache-dir --upgrade pip
RUN pip install --no-cache-dir -r /aider/requirements-dev.txt
RUN pip install --no-cache-dir /aider[dev]
RUN git config --global --add safe.directory /aider
WORKDIR /aider

View file

@ -2,14 +2,12 @@
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile requirements.in
# pip-compile --output-file=requirements.txt requirements/requirements.in
#
aiohttp==3.9.5
# via litellm
aiosignal==1.3.1
# via aiohttp
altair==5.3.0
# via streamlit
annotated-types==0.7.0
# via pydantic
anyio==4.4.0
@ -22,15 +20,11 @@ attrs==23.2.0
# jsonschema
# referencing
backoff==2.2.1
# via -r requirements.in
# via -r requirements/requirements.in
beautifulsoup4==4.12.3
# via -r requirements.in
blinker==1.8.2
# via streamlit
# via -r requirements/requirements.in
cachetools==5.3.3
# via
# google-auth
# streamlit
# via google-auth
certifi==2024.7.4
# via
# httpcore
@ -43,21 +37,19 @@ cffi==1.16.0
charset-normalizer==3.3.2
# via requests
click==8.1.7
# via
# litellm
# streamlit
# via litellm
configargparse==1.7
# via -r requirements.in
# via -r requirements/requirements.in
diff-match-patch==20230430
# via -r requirements.in
# via -r requirements/requirements.in
diskcache==5.6.3
# via -r requirements.in
# via -r requirements/requirements.in
distro==1.9.0
# via openai
filelock==3.15.4
# via huggingface-hub
flake8==7.1.0
# via -r requirements.in
# via -r requirements/requirements.in
frozenlist==1.4.1
# via
# aiohttp
@ -67,9 +59,7 @@ fsspec==2024.6.1
gitdb==4.0.11
# via gitpython
gitpython==3.1.43
# via
# -r requirements.in
# streamlit
# via -r requirements/requirements.in
google-ai-generativelanguage==0.6.6
# via google-generativeai
google-api-core[grpc]==2.19.1
@ -89,7 +79,7 @@ google-auth==2.31.0
google-auth-httplib2==0.2.0
# via google-api-python-client
google-generativeai==0.7.1
# via -r requirements.in
# via -r requirements/requirements.in
googleapis-common-protos==1.63.2
# via
# google-api-core
@ -97,7 +87,7 @@ googleapis-common-protos==1.63.2
greenlet==3.0.3
# via playwright
grep-ast==0.3.2
# via -r requirements.in
# via -r requirements/requirements.in
grpcio==1.64.1
# via
# google-api-core
@ -124,24 +114,20 @@ idna==3.7
# yarl
importlib-metadata==7.2.1
# via
# -r requirements.in
# -r requirements/requirements.in
# litellm
importlib-resources==6.4.0
# via -r requirements.in
# via -r requirements/requirements.in
jinja2==3.1.4
# via
# altair
# litellm
# pydeck
# via litellm
jsonschema==4.22.0
# via
# -r requirements.in
# altair
# -r requirements/requirements.in
# litellm
jsonschema-specifications==2023.12.1
# via jsonschema
litellm==1.41.6
# via -r requirements.in
# via -r requirements/requirements.in
markdown-it-py==3.0.0
# via rich
markupsafe==2.1.5
@ -155,40 +141,27 @@ multidict==6.0.5
# aiohttp
# yarl
networkx==3.2.1
# via -r requirements.in
# via -r requirements/requirements.in
numpy==1.26.4
# via
# -r requirements.in
# altair
# pandas
# pyarrow
# pydeck
# -r requirements/requirements.in
# scipy
# streamlit
openai==1.35.10
# via litellm
packaging==24.1
# via
# -r requirements.in
# altair
# -r requirements/requirements.in
# huggingface-hub
# streamlit
pandas==2.2.2
# via
# altair
# streamlit
pathspec==0.12.1
# via
# -r requirements.in
# -r requirements/requirements.in
# grep-ast
pillow==10.4.0
# via
# -r requirements.in
# streamlit
# via -r requirements/requirements.in
playwright==1.45.0
# via -r requirements.in
# via -r requirements/requirements.in
prompt-toolkit==3.0.47
# via -r requirements.in
# via -r requirements/requirements.in
proto-plus==1.24.0
# via
# google-ai-generativelanguage
@ -201,9 +174,6 @@ protobuf==4.25.3
# googleapis-common-protos
# grpcio-status
# proto-plus
# streamlit
pyarrow==16.1.0
# via streamlit
pyasn1==0.6.0
# via
# pyasn1-modules
@ -221,8 +191,6 @@ pydantic==2.8.2
# openai
pydantic-core==2.20.1
# via pydantic
pydeck==0.9.1
# via streamlit
pyee==11.1.0
# via playwright
pyflakes==3.2.0
@ -230,18 +198,14 @@ pyflakes==3.2.0
pygments==2.18.0
# via rich
pypandoc==1.13
# via -r requirements.in
# via -r requirements/requirements.in
pyparsing==3.1.2
# via httplib2
python-dateutil==2.9.0.post0
# via pandas
python-dotenv==1.0.1
# via litellm
pytz==2024.1
# via pandas
pyyaml==6.0.1
# via
# -r requirements.in
# -r requirements/requirements.in
# huggingface-hub
referencing==0.35.1
# via
@ -254,12 +218,9 @@ requests==2.32.3
# google-api-core
# huggingface-hub
# litellm
# streamlit
# tiktoken
rich==13.7.1
# via
# -r requirements.in
# streamlit
# via -r requirements/requirements.in
rpds-py==0.18.1
# via
# jsonschema
@ -267,9 +228,7 @@ rpds-py==0.18.1
rsa==4.9
# via google-auth
scipy==1.13.1
# via -r requirements.in
six==1.16.0
# via python-dateutil
# via -r requirements/requirements.in
smmap==5.0.1
# via gitdb
sniffio==1.3.1
@ -278,25 +237,15 @@ sniffio==1.3.1
# httpx
# openai
sounddevice==0.4.7
# via -r requirements.in
# via -r requirements/requirements.in
soundfile==0.12.1
# via -r requirements.in
# via -r requirements/requirements.in
soupsieve==2.5
# via beautifulsoup4
streamlit==1.36.0
# via -r requirements.in
tenacity==8.4.2
# via streamlit
tiktoken==0.7.0
# via litellm
tokenizers==0.19.1
# via litellm
toml==0.10.2
# via streamlit
toolz==0.12.1
# via altair
tornado==6.4.1
# via streamlit
tqdm==4.66.4
# via
# google-generativeai
@ -304,7 +253,7 @@ tqdm==4.66.4
# openai
tree-sitter==0.21.3
# via
# -r requirements.in
# -r requirements/requirements.in
# tree-sitter-languages
tree-sitter-languages==1.10.2
# via grep-ast
@ -316,15 +265,10 @@ typing-extensions==4.12.2
# pydantic
# pydantic-core
# pyee
# streamlit
tzdata==2024.1
# via pandas
uritemplate==4.1.1
# via google-api-python-client
urllib3==2.2.2
# via requests
watchdog==4.0.1
# via -r requirements.in
wcwidth==0.2.13
# via prompt-toolkit
yarl==1.9.4

View file

@ -0,0 +1,4 @@
-c ../requirements.txt
streamlit
watchdog

View file

@ -0,0 +1,151 @@
#
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --output-file=requirements/requirements-browser.txt requirements/requirements-browser.in
#
altair==5.3.0
# via streamlit
attrs==23.2.0
# via
# -c requirements/../requirements.txt
# jsonschema
# referencing
blinker==1.8.2
# via streamlit
cachetools==5.3.3
# via
# -c requirements/../requirements.txt
# streamlit
certifi==2024.7.4
# via
# -c requirements/../requirements.txt
# requests
charset-normalizer==3.3.2
# via
# -c requirements/../requirements.txt
# requests
click==8.1.7
# via
# -c requirements/../requirements.txt
# streamlit
gitdb==4.0.11
# via
# -c requirements/../requirements.txt
# gitpython
gitpython==3.1.43
# via
# -c requirements/../requirements.txt
# streamlit
idna==3.7
# via
# -c requirements/../requirements.txt
# requests
jinja2==3.1.4
# via
# -c requirements/../requirements.txt
# altair
# pydeck
jsonschema==4.22.0
# via
# -c requirements/../requirements.txt
# altair
jsonschema-specifications==2023.12.1
# via
# -c requirements/../requirements.txt
# jsonschema
markdown-it-py==3.0.0
# via
# -c requirements/../requirements.txt
# rich
markupsafe==2.1.5
# via
# -c requirements/../requirements.txt
# jinja2
mdurl==0.1.2
# via
# -c requirements/../requirements.txt
# markdown-it-py
numpy==1.26.4
# via
# -c requirements/../requirements.txt
# altair
# pandas
# pyarrow
# pydeck
# streamlit
packaging==24.1
# via
# -c requirements/../requirements.txt
# altair
# streamlit
pandas==2.2.2
# via
# altair
# streamlit
pillow==10.4.0
# via
# -c requirements/../requirements.txt
# streamlit
protobuf==4.25.3
# via
# -c requirements/../requirements.txt
# streamlit
pyarrow==16.1.0
# via streamlit
pydeck==0.9.1
# via streamlit
pygments==2.18.0
# via
# -c requirements/../requirements.txt
# rich
python-dateutil==2.9.0.post0
# via pandas
pytz==2024.1
# via pandas
referencing==0.35.1
# via
# -c requirements/../requirements.txt
# jsonschema
# jsonschema-specifications
requests==2.32.3
# via
# -c requirements/../requirements.txt
# streamlit
rich==13.7.1
# via
# -c requirements/../requirements.txt
# streamlit
rpds-py==0.18.1
# via
# -c requirements/../requirements.txt
# jsonschema
# referencing
six==1.16.0
# via python-dateutil
smmap==5.0.1
# via
# -c requirements/../requirements.txt
# gitdb
streamlit==1.36.0
# via -r requirements/requirements-browser.in
tenacity==8.4.2
# via streamlit
toml==0.10.2
# via streamlit
toolz==0.12.1
# via altair
tornado==6.4.1
# via streamlit
typing-extensions==4.12.2
# via
# -c requirements/../requirements.txt
# streamlit
tzdata==2024.1
# via pandas
urllib3==2.2.2
# via
# -c requirements/../requirements.txt
# requests
watchdog==4.0.1
# via -r requirements/requirements-browser.in

View file

@ -1,4 +1,4 @@
-c requirements.txt
-c ../requirements.txt
#
# pip-compile --output-file=requirements-dev.txt requirements-dev.in --upgrade
#

View file

@ -2,7 +2,7 @@
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --output-file=requirements-dev.txt requirements-dev.in
# pip-compile --output-file=requirements/requirements-dev.txt requirements/requirements-dev.in
#
alabaster==0.7.16
# via sphinx
@ -12,21 +12,21 @@ build==1.2.1
# via pip-tools
certifi==2024.7.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
cfgv==3.4.0
# via pre-commit
charset-normalizer==3.3.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
click==8.1.7
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# pip-tools
# typer
cogapp==3.4.1
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
contourpy==1.2.1
# via matplotlib
cycler==0.12.1
@ -43,43 +43,43 @@ docutils==0.20.1
# sphinx-rtd-theme
filelock==3.15.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# virtualenv
fonttools==4.53.0
fonttools==4.53.1
# via matplotlib
identify==2.5.36
# via pre-commit
idna==3.7
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
imagesize==1.4.1
# via sphinx
imgcat==0.5.0
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
iniconfig==2.0.0
# via pytest
jinja2==3.1.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# sphinx
kiwisolver==1.4.5
# via matplotlib
lox==0.12.0
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
markdown-it-py==3.0.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# rich
markupsafe==2.1.5
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# jinja2
matplotlib==3.9.1
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
mdurl==0.1.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# markdown-it-py
multiprocess==0.70.16
# via pathos
@ -87,29 +87,27 @@ nodeenv==1.9.1
# via pre-commit
numpy==1.26.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# contourpy
# matplotlib
# pandas
packaging==24.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# build
# matplotlib
# pytest
# sphinx
pandas==2.2.2
# via
# -c requirements.txt
# -r requirements-dev.in
# via -r requirements/requirements-dev.in
pathos==0.3.2
# via lox
pillow==10.4.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# matplotlib
pip-tools==7.4.1
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
platformdirs==4.2.2
# via virtualenv
pluggy==1.5.0
@ -119,49 +117,44 @@ pox==0.3.4
ppft==1.7.6.8
# via pathos
pre-commit==3.7.1
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
pygments==2.18.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# rich
# sphinx
pyparsing==3.1.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# matplotlib
pyproject-hooks==1.1.0
# via
# build
# pip-tools
pytest==8.2.2
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
python-dateutil==2.9.0.post0
# via
# -c requirements.txt
# matplotlib
# pandas
pytz==2024.1
# via
# -c requirements.txt
# pandas
# via pandas
pyyaml==6.0.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# pre-commit
requests==2.32.3
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# sphinx
rich==13.7.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# typer
shellingham==1.5.4
# via typer
six==1.16.0
# via
# -c requirements.txt
# python-dateutil
# via python-dateutil
snowballstemmer==2.2.0
# via sphinx
sphinx==7.3.7
@ -185,18 +178,16 @@ sphinxcontrib-qthelp==1.0.7
sphinxcontrib-serializinghtml==1.1.10
# via sphinx
typer==0.12.3
# via -r requirements-dev.in
# via -r requirements/requirements-dev.in
typing-extensions==4.12.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# typer
tzdata==2024.1
# via
# -c requirements.txt
# pandas
# via pandas
urllib3==2.2.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
virtualenv==20.26.3
# via pre-commit

View file

@ -1,4 +1,4 @@
-c requirements.txt
-c ../requirements.txt
#
# pip-compile --output-file=requirements-hf.txt requirements-hf.in --upgrade
#

View file

@ -2,43 +2,43 @@
# This file is autogenerated by pip-compile with Python 3.12
# by the following command:
#
# pip-compile --output-file=requirements-hf-embed.txt requirements-hf-embed.in
# pip-compile --output-file=requirements/requirements-hf-embed.txt requirements/requirements-hf-embed.in
#
aiohttp==3.9.5
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
aiosignal==1.3.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# aiohttp
annotated-types==0.7.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# pydantic
anyio==4.4.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# httpx
# openai
attrs==23.2.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# aiohttp
certifi==2024.7.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# httpcore
# httpx
# requests
charset-normalizer==3.3.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
click==8.1.7
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# nltk
dataclasses-json==0.6.7
# via llama-index-core
@ -48,60 +48,60 @@ dirtyjson==1.0.8
# via llama-index-core
distro==1.9.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# openai
filelock==3.15.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# torch
# transformers
frozenlist==1.4.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# aiohttp
# aiosignal
fsspec==2024.6.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
# torch
greenlet==3.0.3
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# sqlalchemy
h11==0.14.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# httpcore
httpcore==1.0.5
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# httpx
httpx==0.27.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-cloud
# llama-index-core
# openai
huggingface-hub[inference]==0.23.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-embeddings-huggingface
# sentence-transformers
# tokenizers
# transformers
idna==3.7
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# anyio
# httpx
# requests
# yarl
jinja2==3.1.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# torch
joblib==1.4.2
# via
@ -111,13 +111,13 @@ llama-cloud==0.0.6
# via llama-index-core
llama-index-core==0.10.52.post2
# via
# -r requirements-hf-embed.in
# -r requirements/requirements-hf-embed.in
# llama-index-embeddings-huggingface
llama-index-embeddings-huggingface==0.2.2
# via -r requirements-hf-embed.in
# via -r requirements/requirements-hf-embed.in
markupsafe==2.1.5
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# jinja2
marshmallow==3.21.3
# via dataclasses-json
@ -127,7 +127,7 @@ mpmath==1.3.0
# via sympy
multidict==6.0.5
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# aiohttp
# yarl
mypy-extensions==1.0.0
@ -136,14 +136,14 @@ nest-asyncio==1.6.0
# via llama-index-core
networkx==3.2.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-core
# torch
nltk==3.8.1
# via llama-index-core
numpy==1.26.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-core
# pandas
# scikit-learn
@ -152,55 +152,49 @@ numpy==1.26.4
# transformers
openai==1.35.10
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-core
packaging==24.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# marshmallow
# transformers
pandas==2.2.2
# via
# -c requirements.txt
# llama-index-core
# via llama-index-core
pillow==10.4.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-core
# sentence-transformers
pydantic==2.8.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-cloud
# openai
pydantic-core==2.20.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# pydantic
python-dateutil==2.9.0.post0
# via
# -c requirements.txt
# pandas
# via pandas
pytz==2024.1
# via
# -c requirements.txt
# pandas
# via pandas
pyyaml==6.0.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
# transformers
regex==2024.5.15
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# nltk
# tiktoken
# transformers
requests==2.32.3
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
# tiktoken
@ -211,18 +205,16 @@ scikit-learn==1.5.1
# via sentence-transformers
scipy==1.13.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# scikit-learn
# sentence-transformers
sentence-transformers==3.0.1
# via llama-index-embeddings-huggingface
six==1.16.0
# via
# -c requirements.txt
# python-dateutil
# via python-dateutil
sniffio==1.3.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# anyio
# httpx
# openai
@ -233,24 +225,22 @@ sqlalchemy[asyncio]==2.0.31
sympy==1.13.0
# via torch
tenacity==8.4.2
# via
# -c requirements.txt
# llama-index-core
# via llama-index-core
threadpoolctl==3.5.0
# via scikit-learn
tiktoken==0.7.0
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# llama-index-core
tokenizers==0.19.1
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# transformers
torch==2.2.2
# via sentence-transformers
tqdm==4.66.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
# nltk
@ -261,7 +251,7 @@ transformers==4.42.3
# via sentence-transformers
typing-extensions==4.12.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# huggingface-hub
# llama-index-core
# openai
@ -275,12 +265,10 @@ typing-inspect==0.9.0
# dataclasses-json
# llama-index-core
tzdata==2024.1
# via
# -c requirements.txt
# pandas
# via pandas
urllib3==2.2.2
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# requests
wrapt==1.16.0
# via
@ -288,5 +276,5 @@ wrapt==1.16.0
# llama-index-core
yarl==1.9.4
# via
# -c requirements.txt
# -c requirements/../requirements.txt
# aiohttp

View file

@ -22,15 +22,22 @@ diff-match-patch
playwright
pypandoc
litellm
google-generativeai
streamlit
watchdog
flake8
importlib_resources
# v3.3 no longer works on python 3.9
# The proper dependency is networkx[default], but this brings
# in matplotlib and a bunch of other deps
# https://github.com/networkx/networkx/blob/d7132daa8588f653eacac7a5bae1ee85a183fa43/pyproject.toml#L57
# We really only need networkx itself and scipy for the repomap.
# Pin below v3.3 to retain python 3.9 compatibility.
networkx<3.3
# This is the one networkx dependency that we need.
# Including it here explicitly because we
# didn't specify networkx[default] above.
# Pin below 1.14 to retain python 3.9 compatibility.
scipy<1.14
# v0.22.2 seems to break tree-sitter-languages?
tree-sitter==0.21.3
@ -39,5 +46,3 @@ tree-sitter==0.21.3
# Uses importlib-metadata
importlib-metadata<8.0.0
# To retain python 3.9 compatibility
scipy<1.14

View file

@ -3,7 +3,16 @@
# exit when any command fails
set -e
pip-compile requirements.in $1
pip-compile --output-file=requirements-dev.txt requirements-dev.in $1
pip-compile --output-file=requirements-hf-embed.txt requirements-hf-embed.in $1
pip-compile \
requirements/requirements.in \
--output-file=requirements.txt \
$1
for SUFFIX in dev hf-embed browser; do
pip-compile \
requirements/requirements-${SUFFIX}.in \
--output-file=requirements/requirements-${SUFFIX}.txt \
$1
done

View file

@ -1,4 +1,5 @@
import re
from pathlib import Path
from setuptools import find_packages, setup
@ -7,15 +8,21 @@ from aider.help_pats import exclude_website_pats
def get_requirements(suffix=""):
fname = "requirements" + suffix + ".txt"
with open(fname) as f:
requirements = f.read().splitlines()
if suffix:
fname = "requirements-" + suffix + ".txt"
fname = Path("requirements") / fname
else:
fname = Path("requirements.txt")
requirements = fname.read_text().splitlines()
return requirements
requirements = get_requirements()
dev_requirements = get_requirements("-dev")
hf_requirements = get_requirements("-hf-embed")
dev_requirements = get_requirements("dev")
hf_requirements = get_requirements("hf-embed")
browser_requirements = get_requirements("browser")
# README
with open("README.md", "r", encoding="utf-8") as f:
@ -41,6 +48,7 @@ setup(
extras_require={
"dev": dev_requirements,
"hf-embed": hf_requirements,
"browser": browser_requirements,
},
python_requires=">=3.9,<3.13",
entry_points={