Merge branch 'main' into mixpanel

This commit is contained in:
Paul Gauthier 2024-10-30 09:40:01 -07:00
commit 068fb38a5d
181 changed files with 141428 additions and 1961 deletions

View file

@@ -26,6 +26,7 @@ def blame(start_tag, end_tag=None):
if f.endswith((".py", ".scm", ".sh", "Dockerfile", "Gemfile"))
or (f.startswith(".github/workflows/") and f.endswith(".yml"))
]
files = [f for f in files if not f.endswith("prompts.py")]
all_file_counts = {}
grand_total = defaultdict(int)

169
scripts/issues.py Executable file
View file

@@ -0,0 +1,169 @@
#!/usr/bin/env python3
import argparse
import os
import re
from collections import defaultdict
from datetime import datetime
import requests
from dotenv import load_dotenv
from tqdm import tqdm
def has_been_reopened(issue_number):
    """Return True if the given issue has ever been reopened.

    Results are memoized on the function object because both
    group_issues_by_subject() and find_oldest_issue() may ask about the
    same issue number, and each uncached call costs a GitHub API request.
    """
    cache = has_been_reopened.__dict__.setdefault("_cache", {})
    if issue_number in cache:
        return cache[issue_number]
    timeline_url = f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{issue_number}/timeline"
    response = requests.get(timeline_url, headers=headers)
    response.raise_for_status()
    events = response.json()
    # Some timeline entries may lack an "event" key; skip those.
    reopened = any(event["event"] == "reopened" for event in events if "event" in event)
    cache[issue_number] = reopened
    return reopened
# Load environment variables from .env file
load_dotenv()
# Template comment posted on issues that get closed as duplicates;
# str.format() fills in {oldest_issue_number} with the canonical issue's number.
DUPLICATE_COMMENT = """Thanks for trying aider and filing this issue.
This looks like a duplicate of #{oldest_issue_number}. Please see the comments there for more information, and feel free to continue the discussion within that issue.
I'm going to close this issue for now. But please let me know if you think this is actually a distinct issue and I will reopen this issue.""" # noqa
# GitHub API configuration
GITHUB_API_URL = "https://api.github.com"
REPO_OWNER = "Aider-AI"
REPO_NAME = "aider"
# Personal access token, read from the environment (populated by load_dotenv above).
TOKEN = os.getenv("GITHUB_TOKEN")
# Shared auth/accept headers for every GitHub API request in this script.
headers = {"Authorization": f"token {TOKEN}", "Accept": "application/vnd.github.v3+json"}
def get_issues(state="open"):
    """Fetch every issue in the repo (paginated), returning a list of dicts.

    state: GitHub issue state filter -- "open", "closed", or "all".
    """
    issues = []
    page = 1
    per_page = 100

    # First, probe with per_page=1 so the last-page number in the Link
    # header equals the total issue count (used only to size the tqdm bar).
    response = requests.get(
        f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues",
        headers=headers,
        params={"state": state, "per_page": 1},
    )
    response.raise_for_status()
    # The Link header is absent when all results fit on one page, and its
    # format is not guaranteed; fall back to an unknown total instead of
    # crashing on int("").
    try:
        total_count = int(response.headers.get("Link", "").split("page=")[-1].split(">")[0])
    except ValueError:
        total_count = 0
    total_pages = max(1, (total_count + per_page - 1) // per_page)

    with tqdm(total=total_pages, desc="Collecting issues", unit="page") as pbar:
        while True:
            response = requests.get(
                f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues",
                headers=headers,
                params={"state": state, "page": page, "per_page": per_page},
            )
            response.raise_for_status()
            page_issues = response.json()
            if not page_issues:
                break
            issues.extend(page_issues)
            page += 1
            pbar.update(1)
    return issues
def group_issues_by_subject(issues):
    """Group crash-report issues by their exact title.

    Only issues whose title matches the "Uncaught ... in ... line N"
    pattern and that have never been reopened are included.
    """
    crash_title = re.compile(r"Uncaught .+ in .+ line \d+")
    grouped = defaultdict(list)
    for issue in issues:
        title = issue["title"]
        if not crash_title.search(title):
            continue
        if has_been_reopened(issue["number"]):
            continue
        grouped[title].append(issue)
    return grouped
def find_oldest_issue(subject, all_issues):
    """Return the earliest-created issue whose title equals *subject*.

    Reopened issues are excluded.  Returns None when no candidate exists.
    """
    oldest_issue = None
    earliest = datetime.now()
    for issue in all_issues:
        # Same short-circuit order as before: check the cheap title
        # comparison before the API-backed reopen check.
        if issue["title"] != subject or has_been_reopened(issue["number"]):
            continue
        created_at = datetime.strptime(issue["created_at"], "%Y-%m-%dT%H:%M:%SZ")
        if created_at < earliest:
            earliest = created_at
            oldest_issue = issue
    return oldest_issue
def comment_and_close_duplicate(issue, oldest_issue):
    """Post a duplicate-notice comment on *issue*, then close it."""
    number = issue["number"]
    issue_url = f"{GITHUB_API_URL}/repos/{REPO_OWNER}/{REPO_NAME}/issues/{number}"
    body = DUPLICATE_COMMENT.format(oldest_issue_number=oldest_issue["number"])

    # Post comment
    response = requests.post(f"{issue_url}/comments", headers=headers, json={"body": body})
    response.raise_for_status()

    # Close issue
    response = requests.patch(issue_url, headers=headers, json={"state": "closed"})
    response.raise_for_status()

    print(f" - Commented and closed issue #{number}")
def main():
    """Group open crash-report issues by title and close the duplicates.

    For each group of identically-titled issues the oldest one (searched
    across open and closed issues) is kept; every newer open issue gets a
    duplicate-notice comment and is closed.  Pass --yes to skip the
    per-group confirmation prompt.
    """
    parser = argparse.ArgumentParser(description="Handle duplicate GitHub issues")
    parser.add_argument(
        "--yes", action="store_true", help="Automatically close duplicates without prompting"
    )
    args = parser.parse_args()

    # A token is required for both reading and writing via the API.
    if not TOKEN:
        print("Error: Missing GITHUB_TOKEN environment variable. Please check your .env file.")
        return

    all_issues = get_issues("all")
    open_only = [issue for issue in all_issues if issue["state"] == "open"]
    grouped_open_issues = group_issues_by_subject(open_only)

    print("Analyzing issues (skipping reopened issues)...")
    for subject, issues in grouped_open_issues.items():
        oldest_issue = find_oldest_issue(subject, all_issues)
        if not oldest_issue:
            continue

        # Skip "groups" that consist solely of the oldest issue itself.
        related_numbers = {issue["number"] for issue in issues}
        related_numbers.add(oldest_issue["number"])
        if len(related_numbers) <= 1:
            continue

        print(f"\nIssue: {subject}")
        print(f"Open issues: {len(issues)}")
        for issue in sorted(issues, key=lambda x: x["number"], reverse=True):
            print(f" - #{issue['number']}: {issue['comments']} comments {issue['html_url']}")
        print(
            f"Oldest issue: #{oldest_issue['number']}: {oldest_issue['comments']} comments"
            f" {oldest_issue['html_url']} ({oldest_issue['state']})"
        )

        if not args.yes:
            # Confirmation prompt
            confirm = input("Do you want to comment and close duplicate issues? (y/n): ")
            if confirm.lower() != "y":
                print("Skipping this group of issues.")
                continue

        # Comment and close duplicate issues
        for issue in issues:
            if issue["number"] != oldest_issue["number"]:
                comment_and_close_duplicate(issue, oldest_issue)

        if oldest_issue["state"] == "open":
            print(f"Oldest issue #{oldest_issue['number']} left open")


if __name__ == "__main__":
    main()

View file

@@ -21,5 +21,7 @@ cog $ARG \
aider/website/docs/config/dotenv.md \
aider/website/docs/config/options.md \
aider/website/docs/config/aider_conf.md \
aider/website/docs/config/adv-model-settings.md \
aider/website/docs/leaderboards/index.md \
aider/website/docs/llms/other.md
aider/website/docs/llms/other.md \
aider/website/docs/more/infinite-output.md

View file

@@ -2,6 +2,7 @@
import argparse
import datetime
import os
import re
import subprocess
import sys
@@ -95,15 +96,15 @@ def main():
f"{new_version.major}.{new_version.minor}.{new_version.micro + 1}"
)
with open("aider/__init__.py", "r") as f:
content = f.read()
from aider import __version__ as current_version
current_version = re.search(r'__version__ = "(.+?)"', content).group(1)
if new_version <= version.parse(current_version):
raise ValueError(
f"New version {new_version} must be greater than the current version {current_version}"
)
with open("aider/__init__.py", "r") as f:
content = f.read()
updated_content = re.sub(r'__version__ = ".+?"', f'__version__ = "{new_version}"', content)
print("Updating aider/__init__.py with new version:")
@@ -117,7 +118,7 @@ def main():
["git", "commit", "-m", f"version bump to {new_version}"],
["git", "tag", f"v{new_version}"],
["git", "push", "origin"],
["git", "push", "origin", f"v{new_version}"],
["git", "push", "origin", f"v{new_version}", "--no-verify"],
]
for cmd in git_commands:
@@ -125,8 +126,9 @@ def main():
if not dry_run:
subprocess.run(cmd, check=True)
new_dev_version = f"{incremented_version}.dev"
updated_dev_content = re.sub(
r'__version__ = ".+?"', f'__version__ = "{incremented_version}-dev"', content
r'__version__ = ".+?"', f'__version__ = "{new_dev_version}"', content
)
print()
@@ -138,8 +140,10 @@ def main():
git_commands_dev = [
["git", "add", "aider/__init__.py"],
["git", "commit", "-m", f"set version to {incremented_version}-dev"],
["git", "push", "origin"],
["git", "commit", "-m", f"set version to {new_dev_version}"],
["git", "tag", f"v{new_dev_version}"],
["git", "push", "origin", "--no-verify"],
["git", "push", "origin", f"v{new_dev_version}", "--no-verify"],
]
for cmd in git_commands_dev:
@@ -147,6 +151,13 @@ def main():
if not dry_run:
subprocess.run(cmd, check=True)
# Remove aider/__version__.py if it exists
version_file = "aider/__version__.py"
if os.path.exists(version_file):
print(f"Removing {version_file}")
if not dry_run:
os.remove(version_file)
if __name__ == "__main__":
main()

View file

@@ -0,0 +1,51 @@
import requests
from packaging import version
from packaging.specifiers import SpecifierSet
def get_versions_supporting_python38_or_lower(package_name):
    """Query PyPI and return {release: reason} for releases usable on Python 3.8.

    A release is considered compatible when its requires_python specifier
    admits Python 3.8, or when the field is missing entirely (treated as
    unrestricted).  Returns an empty dict if the PyPI request fails.
    """
    response = requests.get(f"https://pypi.org/pypi/{package_name}/json")
    if response.status_code != 200:
        print(f"Failed to fetch data for {package_name}")
        return {}

    releases = response.json()["releases"]
    compatible_versions = {}
    py38 = version.parse("3.8")

    for release, files in releases.items():
        if not files:  # Skip empty releases
            continue
        requires_python = files[0].get("requires_python")
        if requires_python is None:
            compatible_versions[release] = (
                "Unspecified (assumed compatible with Python 3.8 and lower)"
            )
            continue
        try:
            if py38 in SpecifierSet(requires_python):
                compatible_versions[release] = (
                    f"Compatible with Python 3.8 (spec: {requires_python})"
                )
        except ValueError:
            print(f"Invalid requires_python specifier for version {release}: {requires_python}")

    return compatible_versions
def main():
    """Print aider-chat releases compatible with Python 3.8, newest first."""
    package_name = "aider-chat"  # Replace with your package name
    compatible_versions = get_versions_supporting_python38_or_lower(package_name)

    print(f"Versions of {package_name} compatible with Python 3.8 or lower:")
    ordered = sorted(
        compatible_versions.items(), key=lambda item: version.parse(item[0]), reverse=True
    )
    for release, support in ordered:
        print(f"{release}: {support}")


if __name__ == "__main__":
    main()