mirror of https://github.com/Aider-AI/aider.git
synced 2025-05-24 22:34:59 +00:00

commit af48cc3e4c ("wip")
parent fceaa0504c
7 changed files with 180 additions and 47 deletions
@@ -5,8 +5,8 @@ import sys
 import warnings
 from pathlib import Path
 
-from tqdm import tqdm
 import importlib_resources
+from tqdm import tqdm
 
 from aider.dump import dump  # noqa: F401
 
@@ -14,16 +14,20 @@ warnings.simplefilter("ignore", category=FutureWarning)
 
 
 def get_package_files():
-    website_files = importlib_resources.files('website')
-    for path in importlib_resources.files('website').iterdir():
-        if path.is_file() and path.name.endswith('.md'):
-            if not any(part.startswith(('OLD', 'tmp')) or part in ('examples', '_posts') for part in path.parts):
-                dump(path)
+    for path in importlib_resources.files("website").iterdir():
+        dump(path)
+        if path.is_file() and path.name.endswith(".md"):
+            if not any(
+                part.startswith(("OLD", "tmp")) or part in ("examples", "_posts")
+                for part in path.parts
+            ):
                 yield str(path)
         elif path.is_dir():
-            for subpath in path.rglob('*.md'):
-                if not any(part.startswith(('OLD', 'tmp')) or part in ('examples', '_posts') for part in subpath.parts):
-                    dump(subpath)
+            for subpath in path.rglob("*.md"):
+                if not any(
+                    part.startswith(("OLD", "tmp")) or part in ("examples", "_posts")
+                    for part in subpath.parts
+                ):
                     yield str(subpath)
 
 
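Note: a minimal sketch of how the refactored get_package_files() generator above might be exercised; it is not part of the commit, and the aider.help module path is an assumption since the extract does not name the changed file.

    # Print every packaged markdown doc the generator yields (hypothetical usage).
    from aider.help import get_package_files  # assumed location of the function above

    for md_path in get_package_files():
        print(md_path)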
@@ -35,9 +39,6 @@ def fname_to_url(filepath):
     docid = ""
     if filepath.startswith("website/_includes/"):
         pass
-    elif "HISTORY.html" in filepath:
-        # too much stale info
-        pass
     elif filepath.startswith(website):
         docid = filepath[len(website) :]
 
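Note: a rough illustration of the prefix handling that remains in fname_to_url() after this hunk; the values below are hypothetical, and only the slicing step is taken from the diff (the rest of the function, including the definition of website, is outside the visible hunk).

    website = "website/"                   # assumed value, suggested by the variable name
    filepath = "website/docs/install.md"   # hypothetical input
    docid = filepath[len(website):]        # -> "docs/install.md"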
@@ -73,8 +74,9 @@ def get_index():
     nodes = []
     for fname in tqdm(list(get_package_files())):
         fname = Path(fname)
+        dump(fname)
         doc = Document(
-            text=importlib_resources.files('website').joinpath(fname).read_text(),
+            text=importlib_resources.files("website").joinpath(fname).read_text(),
             metadata=dict(
                 filename=fname.name,
                 extension=fname.suffix,
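Note: a self-contained sketch of the per-file Document construction in get_index() above, assuming Document comes from llama_index (from llama_index.core import Document); that import and the helper name are assumptions, while the read_text() call and the metadata fields mirror the hunk.

    from pathlib import Path

    import importlib_resources
    from llama_index.core import Document  # assumed source of Document

    def build_docs(fnames):
        # Mirror the loop body above: read each packaged markdown file and wrap
        # it in a Document carrying the filename and extension as metadata.
        for fname in fnames:
            fname = Path(fname)
            text = importlib_resources.files("website").joinpath(fname).read_text()
            yield Document(
                text=text,
                metadata=dict(filename=fname.name, extension=fname.suffix),
            )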